// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

static struct workqueue_struct *cryptd_wq;

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	/*
	 * Protected by disabling BH to allow enqueueing from softirq context
	 * and dequeuing from kworker (cryptd_queue_worker()).
	 */
	struct cryptd_cpu_queue __percpu *cpu_queue;
};
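
/*
 * Note: requests are queued on the CPU they were submitted from, and the
 * matching work item is queued on that same CPU (see
 * cryptd_enqueue_request() and cryptd_queue_worker() below), so each
 * cryptd_cpu_queue is effectively CPU-local.
 */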

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	struct skcipher_request req;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	void *data;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	struct aead_request req;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}
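
/*
 * Queue a request on the current CPU's queue and kick its work item.  Each
 * cryptd context structure above begins with a refcount_t, so the tfm
 * context can be treated as a bare refcount here: once a caller has made
 * the refcount live (non-zero) via cryptd_alloc_*(), it is bumped for
 * every request in flight so the transform cannot be freed while requests
 * are still queued.
 */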
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	local_bh_disable();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out;

	queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out;

	refcount_inc(refcnt);

out:
	local_bh_enable();

	return err;
}

/* Called in workqueue context: do one real crypto operation (via
 * req->complete) and reschedule itself if there is more work to
 * do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 */
	local_bh_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);
	crypto_request_complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	/*
	 * cryptd is allowed to wrap internal algorithms, but in that case the
	 * resulting cryptd instance will be marked as internal as well.
	 */
	*type = algt->type & CRYPTO_ALG_INTERNAL;
	*mask = algt->mask & CRYPTO_ALG_INTERNAL;

	/* No point in cryptd wrapping an algorithm that's already async. */
	*mask |= CRYPTO_ALG_ASYNC;

	*mask |= crypto_algt_inherited_mask(algt);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child,
				  crypto_skcipher_get_flags(parent) &
				  CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}
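
/*
 * Worker-side helper: restore the caller's completion (stashed in the
 * embedded subrequest by cryptd_skcipher_enqueue()) and, unless the request
 * is only being reported as backlogged (-EINPROGRESS), point the subrequest
 * at the synchronous child transform ready for the real operation.
 */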
static struct skcipher_request *cryptd_skcipher_prepare(
	struct skcipher_request *req, int err)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->req;
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *child;

	req->base.complete = subreq->base.complete;
	req->base.data = subreq->base.data;

	if (unlikely(err == -EINPROGRESS))
		return NULL;

	ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	child = ctx->child;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	return subreq;
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err,
				     crypto_completion_t complete)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->req;
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	skcipher_request_complete(req, err);
	local_bh_enable();

	if (unlikely(err == -EINPROGRESS)) {
		subreq->base.complete = req->base.complete;
		subreq->base.data = req->base.data;
		req->base.complete = complete;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(void *data, int err)
{
	struct skcipher_request *req = data;
	struct skcipher_request *subreq;

	subreq = cryptd_skcipher_prepare(req, err);
	if (likely(subreq))
		err = crypto_skcipher_encrypt(subreq);

	cryptd_skcipher_complete(req, err, cryptd_skcipher_encrypt);
}

static void cryptd_skcipher_decrypt(void *data, int err)
{
	struct skcipher_request *req = data;
	struct skcipher_request *subreq;

	subreq = cryptd_skcipher_prepare(req, err);
	if (likely(subreq))
		err = crypto_skcipher_decrypt(subreq);

	cryptd_skcipher_complete(req, err, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->req;
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	subreq->base.complete = req->base.complete;
	subreq->base.data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx) +
		     crypto_skcipher_reqsize(cipher));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}
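
/*
 * As with skciphers, park the caller's completion callback (here in the
 * request context) and substitute the cryptd one, so the real hash work
 * runs later in workqueue context.
 */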
static int cryptd_hash_enqueue(struct ahash_request *req,
				crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	rctx->data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;

	return cryptd_enqueue_request(queue, &req->base);
}

static struct shash_desc *cryptd_hash_prepare(struct ahash_request *req,
					      int err)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	req->base.complete = rctx->complete;
	req->base.data = rctx->data;

	if (unlikely(err == -EINPROGRESS))
		return NULL;

	return &rctx->desc;
}

static void cryptd_hash_complete(struct ahash_request *req, int err,
				 crypto_completion_t complete)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	ahash_request_complete(req, err);
	local_bh_enable();

	if (err == -EINPROGRESS) {
		req->base.complete = complete;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(void *data, int err)
{
	struct ahash_request *req = data;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *child = ctx->child;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (unlikely(!desc))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

out:
	cryptd_hash_complete(req, err, cryptd_hash_init);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = shash_ahash_update(req, desc);

	cryptd_hash_complete(req, err, cryptd_hash_update);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = crypto_shash_final(desc, req->result);

	cryptd_hash_complete(req, err, cryptd_hash_final);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = shash_ahash_finup(req, desc);

	cryptd_hash_complete(req, err, cryptd_hash_finup);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(void *data, int err)
{
	struct ahash_request *req = data;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *child = ctx->child;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (unlikely(!desc))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

out:
	cryptd_hash_complete(req, err, cryptd_hash_digest);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL|
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}
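
/*
 * Common worker-side path for AEAD encrypt and decrypt: restore the
 * caller's completion, run the child transform synchronously, then
 * complete the request and drop the reference taken at enqueue time.
 */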
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child, int err,
			      int (*crypt)(struct aead_request *req),
			      crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx;
	struct aead_request *subreq;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	subreq = &rctx->req;
	req->base.complete = subreq->base.complete;
	req->base.data = subreq->base.data;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				  NULL, NULL);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	err = crypt(subreq);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	aead_request_complete(req, err);
	local_bh_enable();

	if (err == -EINPROGRESS) {
		subreq->base.complete = req->base.complete;
		subreq->base.data = req->base.data;
		req->base.complete = compl;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(void *data, int err)
{
	struct aead_request *req = data;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *child;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	child = ctx->child;
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt,
			  cryptd_aead_encrypt);
}

static void cryptd_aead_decrypt(void *data, int err)
{
	struct aead_request *req = data;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *child;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	child = ctx->child;
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt,
			  cryptd_aead_decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
	struct aead_request *subreq = &rctx->req;

	subreq->base.complete = req->base.complete;
	subreq->base.data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, sizeof(struct cryptd_aead_request_ctx) +
		     crypto_aead_reqsize(cipher));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void cryptd_aead_free(struct aead_instance *inst)
{
	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead_spawn);
	kfree(inst);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	return -EINVAL;
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};
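
/*
 * Exported helpers for callers (e.g. SIMD wrappers) that want a cryptd
 * transform directly rather than asking the crypto API for "cryptd(...)"
 * by name themselves.  A minimal illustrative sketch (the algorithm name
 * and flags below are examples only, not taken from a real user):
 *
 *	struct cryptd_skcipher *tfm;
 *
 *	tfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...submit skcipher requests against &tfm->base, then...
 *	cryptd_free_skcipher(tfm);
 */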
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
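
/*
 * The equivalent helpers for cryptd-wrapped hashes.  cryptd_ahash_child()
 * and cryptd_shash_desc() expose the underlying shash transform and
 * descriptor so callers may run it synchronously themselves when deferral
 * is unnecessary.
 */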
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
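
/*
 * And the AEAD flavour of the same helper API.  As for the other types,
 * cryptd_aead_queued() reports requests still in flight by comparing the
 * refcount against its baseline of one.
 */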
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
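
/*
 * The cryptd workqueue is created with WQ_MEM_RECLAIM since crypto requests
 * may be issued from the memory-reclaim path (e.g. by dm-crypt), and with
 * WQ_CPU_INTENSIVE since the work items are CPU-bound and should not stall
 * other work on the per-CPU worker pools.
 */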
static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");