/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");
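/*
 * cryptd keeps one crypto_queue and one work item per possible CPU.
 * Requests are queued on the submitting CPU and later processed from the
 * crypto workqueue on that same CPU.
 */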
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	atomic_t refcnt;
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_skcipher_ctx {
	atomic_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};
static void cryptd_queue_worker(struct work_struct *work);
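/*
 * Allocate the per-CPU queues and point every per-CPU work item at the
 * shared worker callback.  Called once at module init with
 * cryptd_max_cpu_qlen as the per-CPU queue depth.
 */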
static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}
static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}
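/*
 * Queue a request on the current CPU's queue and kick that CPU's work item.
 * The wrapped tfm context starts with an atomic_t refcount; it is bumped
 * for every successfully queued request so the tfm cannot go away while
 * work is still pending.
 */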
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}
/* Called in workqueue context: do one real crypto operation (via
 * req->complete) and reschedule itself if there is more work to
 * do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging the crypto
	 * workqueue.  preempt_disable/enable is used to prevent being
	 * preempted by cryptd_enqueue_request().  local_bh_disable/enable
	 * is used to prevent cryptd_enqueue_request() from being reentered
	 * from softirq context.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}
static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}
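/*
 * Legacy (a)blkcipher support: wrap a synchronous blkcipher algorithm and
 * expose it as an asynchronous ablkcipher instance.
 */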
static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}
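/*
 * Run the real blkcipher operation in workqueue context.  On the
 * -EINPROGRESS backlog notification only the completion is forwarded;
 * otherwise the child blkcipher is invoked synchronously and the caller's
 * completion callback is restored before being invoked.
 */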
static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_ablkcipher *tfm;
	struct blkcipher_desc desc;
	int refcnt;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	tfm = crypto_ablkcipher_reqtfm(req);
	ctx = crypto_ablkcipher_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(tfm);
}
static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}
static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}
static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}
static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}
static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}
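/*
 * Common instance setup: the new "cryptd(...)" instance inherits its name,
 * block size and alignmask from the wrapped algorithm and gets the wrapped
 * algorithm's priority plus 50.
 */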
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err) {
		kfree(p);
		return ERR_PTR(err);
	}

	return p;
}
static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_put_alg(alg);
	return err;
}
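/*
 * skcipher support: wrap a synchronous skcipher (crypto_sync_skcipher)
 * and expose it as an asynchronous skcipher instance.
 */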
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;
	int err;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_sync_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent,
				  crypto_sync_skcipher_get_flags(child) &
				  CRYPTO_TFM_RES_MASK);
	return err;
}
static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}
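/*
 * Worker-side encrypt/decrypt: build an on-stack request for the
 * synchronous child tfm, run it, then complete the original request via
 * the saved callback.
 */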
static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}
static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));

	return 0;
}
static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
}
static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}
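/*
 * ahash support: wrap a synchronous shash algorithm and expose it as an
 * asynchronous ahash instance.
 */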
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}
static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}
static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}
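/*
 * All ahash entry points below only swap in the cryptd completion callback
 * and defer the real work to the matching cryptd_hash_* worker, which runs
 * the child shash synchronously in workqueue context.
 */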
static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}
static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}
static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}
static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}
static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->cra_flags & (CRYPTO_ALG_INTERNAL |
				   CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(salg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_put_alg(alg);
	return err;
}
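/*
 * AEAD support: wrap an existing AEAD algorithm so that its encrypt and
 * decrypt calls are deferred to the cryptd workqueue.
 */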
static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}
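/*
 * Worker-side AEAD path: retarget the request at the child tfm, run the
 * requested operation synchronously and report the result through the
 * saved completion callback.
 */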
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}
static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}
static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}
static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}
static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}
static struct cryptd_queue queue;
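/*
 * Template entry point: dispatch on the requested algorithm type.  The
 * BLKCIPHER case covers both the legacy blkcipher path and the skcipher
 * path; digests and AEADs go through the wrappers above.
 */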
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		if ((algt->type & CRYPTO_ALG_TYPE_MASK) ==
		    CRYPTO_ALG_TYPE_BLKCIPHER)
			return cryptd_create_blkcipher(tmpl, tb, &queue);

		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}
static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};
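/*
 * Exported helpers for callers (e.g. the x86 SIMD wrappers) that allocate
 * a cryptd-wrapped tfm directly: they give access to the child tfm, let
 * the caller check whether requests are still queued, and drop the
 * reference taken at allocation time.
 */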
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type = crypto_skcipher_type(type);
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_tfm_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
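/*
 * Minimal usage sketch (illustrative only, not taken from an in-tree
 * driver): a caller that wants an async handle on a synchronous hash
 * could do
 *
 *	struct cryptd_ahash *ctfm = cryptd_alloc_ahash("sha256", 0, 0);
 *
 *	if (!IS_ERR(ctfm)) {
 *		...submit ahash requests on &ctfm->base...
 *		cryptd_free_ahash(ctfm);
 *	}
 */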
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;

	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
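/* Module init: set up the per-CPU queues, then register the "cryptd" template. */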
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}
static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}
subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");