1 // SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2019 Samsung Electronics Co., Ltd.
 */
#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/wait.h>
#include <linux/sched.h>

#include "crypto_ctx.h"
16 struct crypto_ctx_list {
19 struct list_head idle_ctx;
20 wait_queue_head_t ctx_wait;
23 static struct crypto_ctx_list ctx_list;
/*
 * Free one AEAD transform slot; tolerates NULL since ctx slots are
 * allocated lazily and may never have been filled.
 */
static inline void free_aead(struct crypto_aead *aead)
{
	if (aead)
		crypto_free_aead(aead);
}
31 static void free_shash(struct shash_desc *shash)
34 crypto_free_shash(shash->tfm);
39 static struct crypto_aead *alloc_aead(int id)
41 struct crypto_aead *tfm = NULL;
44 case CRYPTO_AEAD_AES_GCM:
45 tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
47 case CRYPTO_AEAD_AES_CCM:
48 tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
51 pr_err("Does not support encrypt ahead(id : %d)\n", id);
56 pr_err("Failed to alloc encrypt aead : %ld\n", PTR_ERR(tfm));
63 static struct shash_desc *alloc_shash_desc(int id)
65 struct crypto_shash *tfm = NULL;
66 struct shash_desc *shash;
69 case CRYPTO_SHASH_HMACMD5:
70 tfm = crypto_alloc_shash("hmac(md5)", 0, 0);
72 case CRYPTO_SHASH_HMACSHA256:
73 tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
75 case CRYPTO_SHASH_CMACAES:
76 tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
78 case CRYPTO_SHASH_SHA256:
79 tfm = crypto_alloc_shash("sha256", 0, 0);
81 case CRYPTO_SHASH_SHA512:
82 tfm = crypto_alloc_shash("sha512", 0, 0);
84 case CRYPTO_SHASH_MD4:
85 tfm = crypto_alloc_shash("md4", 0, 0);
87 case CRYPTO_SHASH_MD5:
88 tfm = crypto_alloc_shash("md5", 0, 0);
97 shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(tfm),
100 crypto_free_shash(tfm);
106 static void ctx_free(struct ksmbd_crypto_ctx *ctx)
110 for (i = 0; i < CRYPTO_SHASH_MAX; i++)
111 free_shash(ctx->desc[i]);
112 for (i = 0; i < CRYPTO_AEAD_MAX; i++)
113 free_aead(ctx->ccmaes[i]);
117 static struct ksmbd_crypto_ctx *ksmbd_find_crypto_ctx(void)
119 struct ksmbd_crypto_ctx *ctx;
122 spin_lock(&ctx_list.ctx_lock);
123 if (!list_empty(&ctx_list.idle_ctx)) {
124 ctx = list_entry(ctx_list.idle_ctx.next,
125 struct ksmbd_crypto_ctx,
127 list_del(&ctx->list);
128 spin_unlock(&ctx_list.ctx_lock);
132 if (ctx_list.avail_ctx > num_online_cpus()) {
133 spin_unlock(&ctx_list.ctx_lock);
134 wait_event(ctx_list.ctx_wait,
135 !list_empty(&ctx_list.idle_ctx));
139 ctx_list.avail_ctx++;
140 spin_unlock(&ctx_list.ctx_lock);
142 ctx = kzalloc(sizeof(struct ksmbd_crypto_ctx), GFP_KERNEL);
144 spin_lock(&ctx_list.ctx_lock);
145 ctx_list.avail_ctx--;
146 spin_unlock(&ctx_list.ctx_lock);
147 wait_event(ctx_list.ctx_wait,
148 !list_empty(&ctx_list.idle_ctx));
156 void ksmbd_release_crypto_ctx(struct ksmbd_crypto_ctx *ctx)
161 spin_lock(&ctx_list.ctx_lock);
162 if (ctx_list.avail_ctx <= num_online_cpus()) {
163 list_add(&ctx->list, &ctx_list.idle_ctx);
164 spin_unlock(&ctx_list.ctx_lock);
165 wake_up(&ctx_list.ctx_wait);
169 ctx_list.avail_ctx--;
170 spin_unlock(&ctx_list.ctx_lock);
174 static struct ksmbd_crypto_ctx *____crypto_shash_ctx_find(int id)
176 struct ksmbd_crypto_ctx *ctx;
178 if (id >= CRYPTO_SHASH_MAX)
181 ctx = ksmbd_find_crypto_ctx();
185 ctx->desc[id] = alloc_shash_desc(id);
188 ksmbd_release_crypto_ctx(ctx);
192 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_hmacmd5(void)
194 return ____crypto_shash_ctx_find(CRYPTO_SHASH_HMACMD5);
197 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_hmacsha256(void)
199 return ____crypto_shash_ctx_find(CRYPTO_SHASH_HMACSHA256);
202 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_cmacaes(void)
204 return ____crypto_shash_ctx_find(CRYPTO_SHASH_CMACAES);
207 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_sha256(void)
209 return ____crypto_shash_ctx_find(CRYPTO_SHASH_SHA256);
212 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_sha512(void)
214 return ____crypto_shash_ctx_find(CRYPTO_SHASH_SHA512);
217 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_md4(void)
219 return ____crypto_shash_ctx_find(CRYPTO_SHASH_MD4);
222 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_md5(void)
224 return ____crypto_shash_ctx_find(CRYPTO_SHASH_MD5);
227 static struct ksmbd_crypto_ctx *____crypto_aead_ctx_find(int id)
229 struct ksmbd_crypto_ctx *ctx;
231 if (id >= CRYPTO_AEAD_MAX)
234 ctx = ksmbd_find_crypto_ctx();
238 ctx->ccmaes[id] = alloc_aead(id);
241 ksmbd_release_crypto_ctx(ctx);
245 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_gcm(void)
247 return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES_GCM);
250 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_ccm(void)
252 return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES_CCM);
255 void ksmbd_crypto_destroy(void)
257 struct ksmbd_crypto_ctx *ctx;
259 while (!list_empty(&ctx_list.idle_ctx)) {
260 ctx = list_entry(ctx_list.idle_ctx.next,
261 struct ksmbd_crypto_ctx,
263 list_del(&ctx->list);
268 int ksmbd_crypto_create(void)
270 struct ksmbd_crypto_ctx *ctx;
272 spin_lock_init(&ctx_list.ctx_lock);
273 INIT_LIST_HEAD(&ctx_list.idle_ctx);
274 init_waitqueue_head(&ctx_list.ctx_wait);
275 ctx_list.avail_ctx = 1;
277 ctx = kzalloc(sizeof(struct ksmbd_crypto_ctx), GFP_KERNEL);
280 list_add(&ctx->list, &ctx_list.idle_ctx);