// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE 1619/D16
 * http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 * (sector sizes which are not a multiple of 16 bytes are,
 * however, currently unsupported)
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
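/*
 * XTS at a glance (per IEEE 1619): each 16-byte block is whitened with a
 * per-block tweak before and after the block cipher, and the tweak for the
 * next block is derived by doubling the current one in GF(2^128):
 *
 *	T_0     = E_{Key2}(IV)
 *	C_j     = E_{Key1}(P_j XOR T_j) XOR T_j
 *	T_{j+1} = T_j * x	(multiplication in GF(2^128))
 */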
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
struct priv {
	struct crypto_skcipher *child;	/* 'ecb(...)' instance for the data */
	struct crypto_cipher *tweak;	/* single-block cipher for the tweak */
};

struct xts_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	char name[CRYPTO_MAX_ALG_NAME];
};

struct rctx {
	le128 t;			/* current tweak value */
	struct skcipher_request subreq;
};
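/*
 * Note on the three contexts above: xts_instance_ctx lives in the template
 * instance and only carries what is needed to build transforms; struct priv
 * is the per-tfm context holding the two keyed ciphers; struct rctx is the
 * per-request context, carrying the running tweak and the sub-request that
 * is forwarded to the underlying 'ecb(...)' instance.
 */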
static int setkey(struct crypto_skcipher *parent, const u8 *key,
		  unsigned int keylen)
{
	struct priv *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
	int err;

	err = xts_verify_key(parent, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	tweak = ctx->tweak;
	crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tweak, key + keylen, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(tweak) &
					  CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	/* data cipher, uses Key1 i.e. the first half of *key */
	child = ctx->child;
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}
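/*
 * Illustrative only (not part of this file): a caller supplies the two keys
 * concatenated, so e.g. AES-256 in XTS mode takes a 64-byte key. A minimal
 * sketch, assuming a kernel context where "xts(aes)" is available:
 *
 *	struct crypto_skcipher *tfm;
 *	u8 key[64];	// Key1 || Key2, e.g. filled by get_random_bytes()
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
 *
 * xts_verify_key() additionally rejects keys whose two halves are identical
 * when FIPS mode is enabled.
 */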
/*
 * We compute the tweak masks twice (both before and after the ECB encryption or
 * decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower than
 * just doing the gf128mul_x_ble() calls again.
 */
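/*
 * gf128mul_x_ble() doubles a 128-bit value in GF(2^128) using the XTS
 * ("ble", little-endian block) convention: shift the whole value left by
 * one bit and, if a bit was shifted out, XOR the reduction constant 0x87
 * into the lowest byte. One step, with t viewed as two little-endian
 * 64-bit halves:
 *
 *	carry = t.hi >> 63;
 *	t.hi  = (t.hi << 1) | (t.lo >> 63);
 *	t.lo  = (t.lo << 1) ^ (carry ? 0x87 : 0);
 *
 * which is multiplication by x modulo x^128 + x^7 + x^2 + x + 1.
 */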
static int xor_tweak(struct skcipher_request *req, bool second_pass)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	le128 t = rctx->t;
	int err;

	if (second_pass) {
		req = &rctx->subreq;
		/* set to our TFM to enforce correct alignment: */
		skcipher_request_set_tfm(req, tfm);
	}
	err = skcipher_walk_virt(&w, req, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wsrc = w.src.virt.addr;
		le128 *wdst = w.dst.virt.addr;

		do {
			le128_xor(wdst++, &t, wsrc++);
			gf128mul_x_ble(&t, &t);
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	return err;
}
static int xor_tweak_pre(struct skcipher_request *req)
{
	return xor_tweak(req, false);
}

static int xor_tweak_post(struct skcipher_request *req)
{
	return xor_tweak(req, true);
}
static void crypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct rctx *rctx = skcipher_request_ctx(req);

		/* this completion may run in atomic (softirq) context, so
		 * the second xor_tweak() pass must not sleep: */
		rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
		err = xor_tweak_post(req);
	}

	skcipher_request_complete(req, err);
}
static void init_crypt(struct skcipher_request *req)
{
	struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, crypt_done, req);
	/* the ECB sub-request runs in place on req->dst, where
	 * xor_tweak_pre() will have deposited the tweaked data: */
	skcipher_request_set_crypt(subreq, req->dst, req->dst,
				   req->cryptlen, NULL);

	/* calculate first value of T */
	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);
}
static int encrypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	init_crypt(req);
	return xor_tweak_pre(req) ?:
		crypto_skcipher_encrypt(subreq) ?:
		xor_tweak_post(req);
}
static int decrypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	init_crypt(req);
	return xor_tweak_pre(req) ?:
		crypto_skcipher_decrypt(subreq) ?:
		xor_tweak_post(req);
}
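/*
 * Both directions follow the same three-pass pattern: xor_tweak_pre()
 * whitens src into dst, the 'ecb(...)' sub-request transforms dst in
 * place, and xor_tweak_post() whitens again. The `?:` chains also
 * propagate -EINPROGRESS/-EBUSY from an async sub-request, in which case
 * crypt_done() performs the final pass instead.
 */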
static int init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct priv *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;

	child = crypto_spawn_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	tweak = crypto_alloc_cipher(ictx->name, 0, 0);
	if (IS_ERR(tweak)) {
		crypto_free_skcipher(ctx->child);
		return PTR_ERR(tweak);
	}

	ctx->tweak = tweak;

	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct rctx));

	return 0;
}
static void exit_tfm(struct crypto_skcipher *tfm)
{
	struct priv *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}
static void free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
static int create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct xts_instance_ctx *ctx;
	struct skcipher_alg *alg;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));

	mask = crypto_requires_off(algt->type, algt->mask,
				   CRYPTO_ALG_NEED_FALLBACK |
				   CRYPTO_ALG_ASYNC);

	err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0, mask);
	if (err == -ENOENT) {
		err = -ENAMETOOLONG;
		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	alg = crypto_skcipher_spawn_alg(&ctx->spawn);

	err = -EINVAL;
	if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
		goto err_drop_spawn;

	if (crypto_skcipher_alg_ivsize(alg))
		goto err_drop_spawn;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
				  &alg->base);
	if (err)
		goto err_drop_spawn;

	err = -EINVAL;
	cipher_name = alg->base.cra_name;

	/* Alas we screwed up the naming so we have to mangle the
	 * cipher name.
	 */
	if (!strncmp(cipher_name, "ecb(", 4)) {
		unsigned len;

		len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
		if (len < 2 || len >= sizeof(ctx->name))
			goto err_drop_spawn;

		if (ctx->name[len - 1] != ')')
			goto err_drop_spawn;

		ctx->name[len - 1] = 0;

		if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
			err = -ENAMETOOLONG;
			goto err_drop_spawn;
		}
	} else
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
				       (__alignof__(u64) - 1);

	inst->alg.ivsize = XTS_BLOCK_SIZE;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

	inst->alg.base.cra_ctxsize = sizeof(struct priv);

	inst->alg.init = init_tfm;
	inst->alg.exit = exit_tfm;

	inst->alg.setkey = setkey;
	inst->alg.encrypt = encrypt;
	inst->alg.decrypt = decrypt;

	inst->free = free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(&ctx->spawn);
err_free_inst:
	kfree(inst);
	goto out;
}
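/*
 * Example of the name mangling above: instantiating "xts(aes)" first tries
 * to grab "aes" as an skcipher, falls back to grabbing "ecb(aes)", and the
 * instance name is then rewritten from "xts(ecb(aes))" back to "xts(aes)"
 * so that lookups and /proc/crypto show the conventional name.
 */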
static struct crypto_template crypto_tmpl = {
	.name = "xts",
	.create = create,
	.module = THIS_MODULE,
};
static int __init crypto_module_init(void)
{
	return crypto_register_template(&crypto_tmpl);
}

static void __exit crypto_module_exit(void)
{
	crypto_unregister_template(&crypto_tmpl);
}

subsys_initcall(crypto_module_init);
module_exit(crypto_module_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");