// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE1619/D16
 *	http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct xts_tfm_ctx {
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
};

struct xts_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	char name[CRYPTO_MAX_ALG_NAME];
};

struct xts_request_ctx {
	le128 t;
	struct scatterlist *tail;
	struct scatterlist sg[2];
	struct skcipher_request subreq;
};
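
/*
 * An XTS key is the concatenation of two equal-size keys: Key1 for the
 * data cipher and Key2 for the tweak cipher. xts_verify_key() rejects
 * odd key lengths and, in FIPS mode (or with
 * CRYPTO_TFM_REQ_FORBID_WEAK_KEYS), keys whose two halves are identical.
 */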
static int xts_setkey(struct crypto_skcipher *parent, const u8 *key,
		      unsigned int keylen)
{
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
	int err;

	err = xts_verify_key(parent, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	tweak = ctx->tweak;
	crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tweak, key + keylen, keylen);
	if (err)
		return err;

	/* data cipher, uses Key1 i.e. the first half of *key */
	child = ctx->child;
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

/*
 * We compute the tweak masks twice (both before and after the ECB encryption or
 * decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower than
 * just doing the gf128mul_x_ble() calls again.
 */
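/*
 * The tweak sequence follows IEEE 1619: T_0 = E_{Key2}(IV), and each
 * subsequent block's tweak is the previous one multiplied by x in
 * GF(2^128). gf128mul_x_ble() performs that multiplication using the
 * little-endian block convention that XTS specifies.
 */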
static int xts_xor_tweak(struct skcipher_request *req, bool second_pass,
			 bool enc)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	le128 t = rctx->t;
	int err;

	if (second_pass) {
		req = &rctx->subreq;
		/* set to our TFM to enforce correct alignment: */
		skcipher_request_set_tfm(req, tfm);
	}
	err = skcipher_walk_virt(&w, req, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wsrc;
		le128 *wdst;

		wsrc = w.src.virt.addr;
		wdst = w.dst.virt.addr;

		do {
			if (unlikely(cts) &&
			    w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {
				if (!enc) {
					if (second_pass)
						rctx->t = t;
					gf128mul_x_ble(&t, &t);
				}
				le128_xor(wdst, &t, wsrc);
				if (enc && second_pass)
					gf128mul_x_ble(&rctx->t, &t);
				skcipher_walk_done(&w, avail - bs);
				return 0;
			}

			le128_xor(wdst++, &t, wsrc++);
			gf128mul_x_ble(&t, &t);
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	return err;
}

static int xts_xor_tweak_pre(struct skcipher_request *req, bool enc)
{
	return xts_xor_tweak(req, false, enc);
}

static int xts_xor_tweak_post(struct skcipher_request *req, bool enc)
{
	return xts_xor_tweak(req, true, enc);
}
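
/*
 * Completion callback for the asynchronous ciphertext-stealing
 * subrequest: the cipher itself has run, but the stolen final block
 * still needs its tweak XORed back in before the caller is notified.
 */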
static void xts_cts_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;
	le128 b;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
		le128_xor(&b, &rctx->t, &b);
		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
	}

	skcipher_request_complete(req, err);
}
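
/*
 * XTS ciphertext stealing, roughly: when the message length is not a
 * multiple of 16 bytes, the final partial block "steals" the trailing
 * bytes of the previous ciphertext block. We copy the last full
 * ciphertext block over the tail, overlay the partial plaintext on top
 * of its head, and run the cipher once more over the combined block.
 */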
static int xts_cts_final(struct skcipher_request *req,
			 int (*crypt)(struct skcipher_request *req))
{
	const struct xts_tfm_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int tail = req->cryptlen % XTS_BLOCK_SIZE;
	le128 b[2];
	int err;

	rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
				      offset - XTS_BLOCK_SIZE);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	b[1] = b[0];
	scatterwalk_map_and_copy(b, req->src, offset, tail, 0);

	le128_xor(b, &rctx->t, b);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done,
				      req);
	skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
				   XTS_BLOCK_SIZE, NULL);

	err = crypt(subreq);
	if (err)
		return err;

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	le128_xor(b, &rctx->t, b);
	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);

	return 0;
}

static void xts_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;
		err = xts_xor_tweak_post(req, true);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = xts_cts_final(req, crypto_skcipher_encrypt);
			if (err == -EINPROGRESS || err == -EBUSY)
				return;
		}
	}

	skcipher_request_complete(req, err);
}

static void xts_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;
		err = xts_xor_tweak_post(req, false);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = xts_cts_final(req, crypto_skcipher_decrypt);
			if (err == -EINPROGRESS || err == -EBUSY)
				return;
		}
	}

	skcipher_request_complete(req, err);
}
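
/*
 * Common setup for both directions: the subrequest covers only the full
 * blocks (cryptlen rounded down to the block size); any partial tail is
 * handled afterwards by xts_cts_final(). Inputs shorter than one block
 * cannot be processed at all, hence the -EINVAL below.
 */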
static int xts_init_crypt(struct skcipher_request *req,
			  crypto_completion_t compl)
{
	const struct xts_tfm_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, compl, req);
	skcipher_request_set_crypt(subreq, req->dst, req->dst,
				   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

	/* calculate first value of T */
	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

	return 0;
}
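
/*
 * The chains below use the GNU "a ?: b" extension: each step runs only
 * if every previous step returned zero, so the first error
 * short-circuits straight into the return value.
 */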
static int xts_encrypt(struct skcipher_request *req)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = xts_init_crypt(req, xts_encrypt_done) ?:
	      xts_xor_tweak_pre(req, true) ?:
	      crypto_skcipher_encrypt(subreq) ?:
	      xts_xor_tweak_post(req, true);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return xts_cts_final(req, crypto_skcipher_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = xts_init_crypt(req, xts_decrypt_done) ?:
	      xts_xor_tweak_pre(req, false) ?:
	      crypto_skcipher_decrypt(subreq) ?:
	      xts_xor_tweak_post(req, false);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return xts_cts_final(req, crypto_skcipher_decrypt);
}

static int xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;

	child = crypto_spawn_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	tweak = crypto_alloc_cipher(ictx->name, 0, 0);
	if (IS_ERR(tweak)) {
		crypto_free_skcipher(ctx->child);
		return PTR_ERR(tweak);
	}

	ctx->tweak = tweak;

	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct xts_request_ctx));

	return 0;
}

static void xts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}

static void xts_free_instance(struct skcipher_instance *inst)
{
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->spawn);
	kfree(inst);
}
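
/*
 * Template instantiation: "xts(aes)" first tries to grab "aes" directly
 * as an skcipher; if no such spawn exists we fall back to "ecb(aes)".
 * The grabbed algorithm's cra_name must be an "ecb(...)" wrapper, from
 * which the bare cipher name is extracted into ctx->name, both for the
 * "xts(...)" algorithm name and for allocating the tweak cipher in
 * xts_init_tfm().
 */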
static int xts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct xts_instance_ctx *ctx;
	struct skcipher_alg *alg;
	const char *cipher_name;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   cipher_name, 0, mask);
	if (err == -ENOENT) {
		err = -ENAMETOOLONG;
		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_skcipher(&ctx->spawn,
					   skcipher_crypto_instance(inst),
					   ctx->name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	alg = crypto_skcipher_spawn_alg(&ctx->spawn);

	err = -EINVAL;
	if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
		goto err_free_inst;

	if (crypto_skcipher_alg_ivsize(alg))
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
				  &alg->base);
	if (err)
		goto err_free_inst;

	err = -EINVAL;
	cipher_name = alg->base.cra_name;

	/* Alas we screwed up the naming so we have to mangle the
	 * cipher name.
	 */
	if (!strncmp(cipher_name, "ecb(", 4)) {
		int len;

		len = strscpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
		if (len < 2)
			goto err_free_inst;

		if (ctx->name[len - 1] != ')')
			goto err_free_inst;

		ctx->name[len - 1] = 0;

		if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
			err = -ENAMETOOLONG;
			goto err_free_inst;
		}
	} else
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
				       (__alignof__(u64) - 1);
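
	/*
	 * XTS consumes a double-length key (Key1 || Key2), so the
	 * advertised key sizes are twice the underlying cipher's, and the
	 * 16-byte tweak is passed through the IV slot of the request.
	 */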
	inst->alg.ivsize = XTS_BLOCK_SIZE;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

	inst->alg.base.cra_ctxsize = sizeof(struct xts_tfm_ctx);

	inst->alg.init = xts_init_tfm;
	inst->alg.exit = xts_exit_tfm;

	inst->alg.setkey = xts_setkey;
	inst->alg.encrypt = xts_encrypt;
	inst->alg.decrypt = xts_decrypt;

	inst->free = xts_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		xts_free_instance(inst);
	}
	return err;
}

static struct crypto_template xts_tmpl = {
	.name = "xts",
	.create = xts_create,
	.module = THIS_MODULE,
};

static int __init xts_module_init(void)
{
	return crypto_register_template(&xts_tmpl);
}

static void __exit xts_module_exit(void)
{
	crypto_unregister_template(&xts_tmpl);
}

subsys_initcall(xts_module_init);
module_exit(xts_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
MODULE_SOFTDEP("pre: ecb");
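
/*
 * A minimal usage sketch (not part of this module): how a kernel caller
 * might drive "xts(aes)" synchronously. Key and buffer values are
 * illustrative only, error handling is omitted, and real code must not
 * hand scatterlists stack memory as done here for brevity.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	u8 key[64], iv[16], buf[512];
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, sizeof(key));	// Key1 || Key2
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, sizeof(buf));
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */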