// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>
static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");
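/*
 * Note: with a permission of 0 the parameter has no sysfs entry, so the
 * per-CPU queue depth can only be chosen when the module is loaded, e.g.
 * "modprobe cryptd cryptd_max_cpu_qlen=2000", or on the kernel command
 * line as "cryptd.cryptd_max_cpu_qlen=2000" when cryptd is built in.
 */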

static struct workqueue_struct *cryptd_wq;
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	/*
	 * Protected by disabling BH to allow enqueueing from softirq context
	 * and dequeuing from kworker (cryptd_queue_worker()).
	 */
	struct cryptd_cpu_queue __percpu *cpu_queue;
};
struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);
static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	local_bh_disable();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);
	refcnt = crypto_tfm_ctx(request->tfm);
	if (err == -ENOSPC)
		goto out;
	queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);
	if (!refcount_read(refcnt))
		goto out;
	refcount_inc(refcnt);
out:
	local_bh_enable();
	return err;
}
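/*
 * The refcount handling above implements a lazy lifetime scheme: the
 * cryptd_alloc_skcipher/ahash/aead helpers set the per-tfm refcount to 1
 * when a caller takes an external reference, while tfms that are only used
 * internally leave it at 0 and are never pinned here. Each queued request
 * then takes an extra reference so the tfm cannot disappear while its work
 * is still pending; the completion paths drop that reference again.
 */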
/* Called in workqueue context, do one real cryption work (via
 * req->complete) and reschedule itself if there is more work to
 * do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 */
	local_bh_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	/*
	 * cryptd is allowed to wrap internal algorithms, but in that case the
	 * resulting cryptd instance will be marked as internal as well.
	 */
	*type = algt->type & CRYPTO_ALG_INTERNAL;
	*mask = algt->mask & CRYPTO_ALG_INTERNAL;

	/* No point in cryptd wrapping an algorithm that's already async. */
	*mask |= CRYPTO_ALG_ASYNC;

	*mask |= crypto_algt_inherited_mask(algt);
}
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
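/*
 * For example, wrapping "sha256-generic" yields an instance whose
 * cra_driver_name becomes "cryptd(sha256-generic)" while cra_name stays
 * "sha256"; the +50 priority bump means that, other things being equal,
 * the asynchronous wrapper outranks the implementation it wraps.
 */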
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(child, key, keylen);
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}
static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}
static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}
static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}
static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}
static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);
	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}
static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}
static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}
static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL|
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final = cryptd_hash_final_enqueue;
	inst->alg.finup = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}
static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}
static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}
static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}
static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	crypto_free_aead(ctx->child);
}

static void cryptd_aead_free(struct aead_instance *inst)
{
	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead_spawn);
	kfree(inst);
}
static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}
static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	return -EINVAL;
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};
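/*
 * With the template registered, a request for e.g. "cryptd(sha256-generic)"
 * reaches cryptd_create(), which dispatches on the wrapped algorithm's type
 * and builds the matching asynchronous instance. The cryptd_alloc_*()
 * helpers below construct exactly such "cryptd(...)" names on behalf of
 * their callers.
 */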
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");
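
/*
 * Usage sketch (illustrative only, not part of the original file): a typical
 * consumer wraps a synchronous algorithm and drives it through the returned
 * handle, for example:
 *
 *	struct cryptd_ahash *ctfm;
 *
 *	ctfm = cryptd_alloc_ahash("sha256-generic", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 * &ctfm->base is then an ordinary struct crypto_ahash whose requests are
 * processed asynchronously in cryptd_wq context, cryptd_ahash_child(ctfm)
 * exposes the wrapped shash for direct synchronous use, and
 * cryptd_free_ahash(ctfm) drops the reference taken at allocation time.
 */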