/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

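/*
 * The instance structure below is a union on purpose: "head" pads the
 * generic instance header out to offsetof(struct skcipher_alg, base), so
 * that the struct crypto_alg embedded in "s.base" occupies the same
 * storage as "alg.base" and the same memory can be viewed either way.
 */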
struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;
	void *iv;

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline void crypto_set_skcipher_spawn(
	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
{
	crypto_set_spawn(&spawn->base, inst);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
			 u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

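/*
 * Example (sketch only): an implementation that wraps a child transform
 * usually reserves space for the child's request behind its own from its
 * ->init() callback; "child" and "struct my_request_ctx" are illustrative
 * names, not part of this API:
 *
 *	crypto_skcipher_set_reqsize(tfm, sizeof(struct my_request_ctx) +
 *					 crypto_skcipher_reqsize(child));
 */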
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

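/*
 * Example (sketch only): drivers typically register a whole array of
 * algorithms from module init; "my_algs" is an illustrative name:
 *
 *	static struct skcipher_alg my_algs[] = { ... };
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_skciphers(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_skciphers(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */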
int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
		       bool atomic);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

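/*
 * Example (sketch only): a typical software implementation drives the walk
 * helpers above from its ->encrypt()/->decrypt() callbacks roughly as
 * follows; the processing step and the "n" bookkeeping are illustrative,
 * not part of this API:
 *
 *	struct skcipher_walk walk;
 *	int err;
 *
 *	err = skcipher_walk_virt(&walk, req, false);
 *	while (walk.nbytes) {
 *		unsigned int n = walk.nbytes;
 *
 *		... transform n bytes (rounded down to the block size
 *		... unless this is the final chunk) from walk.src.virt.addr
 *		... to walk.dst.virt.addr, using walk.iv as the IV, then
 *		... report how many bytes remain unprocessed:
 *
 *		err = skcipher_walk_done(&walk, walk.nbytes - n);
 *	}
 *	return err;
 */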
static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
					       int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
{
	return req->base.flags;
}

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

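/*
 * The crypto_skcipher_alg_*() helpers below also handle algorithms of the
 * legacy blkcipher and ablkcipher types, whose keysize and block size
 * parameters live in the base struct crypto_alg rather than in struct
 * skcipher_alg.
 */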
static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.min_keysize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.min_keysize;

	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.max_keysize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.max_keysize;

	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_chunksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->chunksize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->walksize;
}

/**
 * crypto_skcipher_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CTR. However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity. This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_skcipher_chunksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_chunksize(crypto_skcipher_alg(tfm));
}

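/*
 * For example, "ctr(aes)" advertises a block size of one byte, as callers
 * may treat it as a stream cipher, yet its chunk size is the 16-byte AES
 * block size, since the counter IV only advances in whole AES blocks.
 */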
/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

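/*
 * For example (hypothetical numbers), an implementation interleaving four
 * AES blocks per SIMD pass could advertise a walksize of 4 * 16 bytes so
 * that the walk code offers it suitably sized pieces whenever possible.
 */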
/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};

static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *
skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
			       struct crypto_alg **cipher_alg_ret);

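/*
 * Example (sketch only, modelled on how a simple mode such as CBC can use
 * the helper above): a template's ->create() callback allocates the
 * instance, fills in its encrypt and decrypt handlers, and registers it.
 * "my_create", "my_encrypt" and "my_decrypt" are illustrative names:
 *
 *	static int my_create(struct crypto_template *tmpl, struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		struct crypto_alg *alg;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = my_encrypt;
 *		inst->alg.decrypt = my_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		crypto_mod_put(alg);
 *		return err;
 *	}
 */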
#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */