/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>
/*
 * Set this if your algorithm is sync but needs a reqsize larger
 * than MAX_SYNC_SKCIPHER_REQSIZE.
 *
 * Reuse bit that is specific to hash algorithms.
 */
#define CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE CRYPTO_ALG_OPTIONAL_KEY
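/*
 * Example (a sketch, not taken from an in-tree driver; "foo" and its
 * flags block are hypothetical): a synchronous algorithm whose
 * per-request context exceeds MAX_SYNC_SKCIPHER_REQSIZE would set this
 * flag at registration time, which keeps the algorithm out of
 * crypto_alloc_sync_skcipher() lookups:
 *
 *	static struct skcipher_alg foo_alg = {
 *		.base.cra_name	= "foo",
 *		.base.cra_flags	= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
 *		...
 *	};
 */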
struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};
struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};
struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;
	void *iv;

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};
static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}
static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	crypto_request_complete(&req->base, err);
}
int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}
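/*
 * Typical usage (a condensed sketch modelled on in-tree templates such
 * as crypto/cts.c; "example" and the elided error handling are
 * illustrative, not a complete create callback): a template allocates
 * an instance with the spawn as its context, then grabs the underlying
 * skcipher by name:
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct crypto_skcipher_spawn *spawn;
 *		struct skcipher_instance *inst;
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER,
 *					     &mask);
 *		if (err)
 *			return err;
 *
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *
 *		spawn = skcipher_instance_ctx(inst);
 *		err = crypto_grab_skcipher(spawn,
 *					   skcipher_crypto_instance(inst),
 *					   crypto_attr_alg_name(tb[1]),
 *					   0, mask);
 *		if (err)
 *			goto err_free_inst;
 *		...
 *	}
 */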
static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}
static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}
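/*
 * Sketch (hypothetical "example" names): an instance's init callback
 * typically spawns the inner transform and sizes the outer request so
 * that a request for the child fits in the outer request's context:
 *
 *	static int example_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct crypto_skcipher_spawn *spawn =
 *			skcipher_instance_ctx(inst);
 *		struct example_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
 *		struct crypto_skcipher *child;
 *
 *		child = crypto_spawn_skcipher(spawn);
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *
 *		ctx->child = child;
 *		crypto_skcipher_set_reqsize(tfm,
 *			sizeof(struct skcipher_request) +
 *			crypto_skcipher_reqsize(child));
 *		return 0;
 *	}
 */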
static inline void crypto_skcipher_set_reqsize_dma(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	reqsize += crypto_dma_align() & ~(crypto_tfm_ctx_alignment() - 1);
	skcipher->reqsize = reqsize;
}
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);
int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);
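/*
 * The canonical consumer loop (a sketch; "example_crypt_blocks" is a
 * hypothetical per-driver helper): skcipher_walk_virt() maps the
 * request's scatterlists, the driver processes up to walk.nbytes bytes
 * per iteration, and skcipher_walk_done() is told how many bytes were
 * left unprocessed:
 *
 *	static int example_encrypt(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		unsigned int bsize = crypto_skcipher_blocksize(tfm);
 *		struct skcipher_walk walk;
 *		int err;
 *
 *		err = skcipher_walk_virt(&walk, req, false);
 *
 *		while (walk.nbytes) {
 *			unsigned int n = walk.nbytes - walk.nbytes % bsize;
 *
 *			example_crypt_blocks(tfm, walk.dst.virt.addr,
 *					     walk.src.virt.addr, n, walk.iv);
 *			err = skcipher_walk_done(&walk, walk.nbytes - n);
 *		}
 *		return err;
 *	}
 */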
static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_skcipher_ctx_dma(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx_dma(&tfm->base);
}
static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}
static inline void *skcipher_request_ctx_dma(struct skcipher_request *req)
{
	unsigned int align = crypto_dma_align();

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(skcipher_request_ctx(req), align);
}
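/*
 * Sketch (hypothetical "example" driver): drivers that DMA into the
 * request context pair crypto_skcipher_set_reqsize_dma() at init time
 * with skcipher_request_ctx_dma() in the request handlers, so that the
 * returned pointer is aligned for the device:
 *
 *	static int example_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		crypto_skcipher_set_reqsize_dma(tfm,
 *			sizeof(struct example_request_ctx));
 *		return 0;
 *	}
 *
 *	static int example_encrypt(struct skcipher_request *req)
 *	{
 *		struct example_request_ctx *rctx =
 *			skcipher_request_ctx_dma(req);
 *		...
 *	}
 */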
static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	return alg->walksize;
}
/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}
/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};

static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}
struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);
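/*
 * Usage sketch (modelled on simple mode templates such as crypto/cbc.c;
 * the "example" handlers are illustrative): a template for a simple
 * mode of operation can delegate most of its create callback to
 * skcipher_alloc_instance_simple() and only fill in the operations
 * before registering the instance:
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = example_encrypt;
 *		inst->alg.decrypt = example_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */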
static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */