/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>
static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Maximum per-CPU queue depth for cryptd");
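/*
 * Note: with perm 0 above, the parameter is load-time only; there is no
 * sysfs knob. An illustrative (not from this file) invocation:
 *
 *	modprobe cryptd cryptd_max_cpu_qlen=2000
 *
 * Once a per-CPU queue is full, cryptd_enqueue_request() returns -ENOSPC
 * unless the request was marked CRYPTO_TFM_REQ_MAY_BACKLOG.
 */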
static struct workqueue_struct *cryptd_wq;
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};
struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

/* All tfm contexts must start with the refcnt; see cryptd_enqueue_request(). */
struct cryptd_skcipher_ctx {
	atomic_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};
static void cryptd_queue_worker(struct work_struct *work);
static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}
static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	/* Every cryptd tfm context begins with an atomic_t refcnt. */
	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, cryptd_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}
/* Called in workqueue context: do one real crypto operation (via
 * req->complete) and reschedule itself if there is more work to
 * do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}
static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;
	int err;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_sync_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent,
				  crypto_sync_skcipher_get_flags(child) &
				  CRYPTO_TFM_RES_MASK);
	return err;
}
static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}
static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}
static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));

	return 0;
}
static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}
static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}
static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
			       CRYPTO_TFM_RES_MASK);
	return err;
}
static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}
static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}
static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}
static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}
static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->cra_flags & (CRYPTO_ALG_INTERNAL |
				   CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(salg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}
static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}
static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}
static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}
static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}
static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}
static struct cryptd_queue queue;
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}
static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};
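/*
 * Illustrative sketch (not part of this file): once the template is
 * registered, asking the crypto API for "cryptd(<alg>)" instantiates it
 * on demand, e.g.
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cryptd(cbc(aes))", 0, 0);
 *
 * which ends up in cryptd_create() above with "cbc(aes)" as the inner
 * algorithm name.
 */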
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
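/*
 * Usage sketch (illustrative only; error paths trimmed and the driver
 * name is an example): a SIMD-style wrapper typically allocates the
 * async shadow tfm like this:
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("__cbc-aes-aesni",
 *				     CRYPTO_ALG_INTERNAL,
 *				     CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 * cryptd_skcipher_child() then exposes the inner synchronous tfm for the
 * fast path, and cryptd_free_skcipher() drops the reference.
 */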
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
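/*
 * Usage sketch (illustrative; the driver name is an example): clmulni-style
 * GHASH glue wraps its internal shash through this helper:
 *
 *	struct cryptd_ahash *cryptd_tfm;
 *
 *	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
 *					CRYPTO_ALG_INTERNAL,
 *					CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(cryptd_tfm))
 *		return PTR_ERR(cryptd_tfm);
 *
 * The child shash is reachable via cryptd_ahash_child() for the
 * synchronous fast path, and cryptd_free_ahash() releases the reference.
 */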
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);
bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
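/*
 * Usage sketch (illustrative; the driver name is an example): AES-NI
 * style AEAD glue has used this as
 *
 *	struct cryptd_aead *cryptd_tfm;
 *
 *	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
 *				       CRYPTO_ALG_INTERNAL,
 *				       CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(cryptd_tfm))
 *		return PTR_ERR(cryptd_tfm);
 *
 * cryptd_aead_queued() reports whether requests are still in flight, so
 * a caller knows when it may invoke the child cipher directly.
 */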
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;

	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);
bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);
void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}
static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}
subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");