// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

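/*
 * Usage note (illustrative, not taken from this file): with permissions of 0
 * the parameter is not writable at runtime, so the per-CPU queue depth is
 * chosen at load time, e.g. "modprobe cryptd cryptd_max_cpu_qlen=2000", or
 * "cryptd.cryptd_max_cpu_qlen=2000" on the kernel command line when cryptd
 * is built in; the value 2000 is only an example.
 */
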
static struct workqueue_struct *cryptd_wq;

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	/*
	 * Protected by disabling BH to allow enqueueing from softirq context
	 * and dequeuing from the kworker (cryptd_queue_worker()).
	 */
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
	struct skcipher_request req;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	local_bh_disable();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out;

	queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out;

	refcount_inc(refcnt);

out:
	local_bh_enable();

	return err;
}

/*
 * Called in workqueue context: perform one real encryption/decryption
 * operation (via req->complete) and reschedule itself if there is more
 * work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging the crypto
	 * workqueue.
	 */
	local_bh_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	/*
	 * cryptd is allowed to wrap internal algorithms, but in that case the
	 * resulting cryptd instance will be marked as internal as well.
	 */
	*type = algt->type & CRYPTO_ALG_INTERNAL;
	*mask = algt->mask & CRYPTO_ALG_INTERNAL;

	/* No point in cryptd wrapping an algorithm that's already async. */
	*mask |= CRYPTO_ALG_ASYNC;

	*mask |= crypto_algt_inherited_mask(algt);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

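/*
 * For example (illustrative only): wrapping a shash whose cra_driver_name is
 * "sha256-generic" would be expected to yield an instance whose driver name
 * is "cryptd(sha256-generic)", carrying the wrapped algorithm's cra_name and
 * a priority 50 above the original.
 */
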
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child,
				  crypto_skcipher_get_flags(parent) &
				  CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->req;
	struct crypto_skcipher *child = ctx->child;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->req;
	struct crypto_skcipher *child = ctx->child;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx) +
		     crypto_skcipher_reqsize(cipher));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
			       CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL|
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void cryptd_aead_free(struct aead_instance *inst)
{
	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead_spawn);
	kfree(inst);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	return -EINVAL;
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

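/*
 * Minimal usage sketch for cryptd_alloc_skcipher() above (illustrative
 * assumptions only, not taken from this file): a SIMD cipher driver might
 * wrap an internal skcipher roughly like this; the algorithm name
 * "__aes-internal" and the surrounding error handling are made up for the
 * example.
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("__aes-internal", CRYPTO_ALG_INTERNAL,
 *				     CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	(requests are then issued against &ctfm->base, and when done:)
 *	cryptd_free_skcipher(ctfm);
 */
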
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

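/*
 * Minimal usage sketch for cryptd_alloc_ahash() above (illustrative
 * assumptions only): a caller might allocate the async wrapper and, on a
 * synchronous fast path, reach the underlying shash via cryptd_ahash_child()
 * and cryptd_shash_desc().  The algorithm name "__sha256-internal" is made
 * up for the example.
 *
 *	struct cryptd_ahash *chash;
 *
 *	chash = cryptd_alloc_ahash("__sha256-internal", CRYPTO_ALG_INTERNAL,
 *				   CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(chash))
 *		return PTR_ERR(chash);
 *	...
 *	cryptd_free_ahash(chash);
 */
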
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

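/*
 * Minimal usage sketch for cryptd_alloc_aead() above (illustrative
 * assumptions only): an AEAD driver might wrap its internal implementation
 * like this; the algorithm name "__gcm-aes-internal" is made up for the
 * example.
 *
 *	struct cryptd_aead *caead;
 *
 *	caead = cryptd_alloc_aead("__gcm-aes-internal", CRYPTO_ALG_INTERNAL,
 *				  CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(caead))
 *		return PTR_ERR(caead);
 *	...
 *	cryptd_free_aead(caead);
 */
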
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");