// SPDX-License-Identifier: GPL-2.0
/*
 * HCTR2 length-preserving encryption mode
 *
 * Copyright 2021 Google LLC
 */

/*
 * HCTR2 is a length-preserving encryption mode that is efficient on
 * processors with instructions to accelerate AES and carryless
 * multiplication, e.g. x86 processors with AES-NI and CLMUL, and ARM
 * processors with the ARMv8 crypto extensions.
 *
 * For more details, see the paper: "Length-preserving encryption with HCTR2"
 * (https://eprint.iacr.org/2021/1441.pdf)
 */

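/*
 * Illustrative usage sketch (not part of this file; names and sizes are
 * example assumptions): an instance such as "hctr2(aes)" is used through the
 * normal skcipher API, with the 32-byte tweak passed as the request IV.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 tweak[32];	// TWEAK_SIZE bytes, e.g. derived by fscrypt
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("hctr2(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, 32);	// AES-256 key
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, data, 4096);	// message must be >= 16 bytes
 *	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, 4096, tweak);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
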
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/polyval.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#define BLOCKCIPHER_BLOCK_SIZE		16

/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length.  The "natural"
 * tweak length for HCTR2 is 16, since that fits into one POLYVAL block for
 * the best performance.  But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys.  So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE		32

struct hctr2_instance_ctx {
	struct crypto_cipher_spawn blockcipher_spawn;
	struct crypto_skcipher_spawn xctr_spawn;
	struct crypto_shash_spawn polyval_spawn;
};

struct hctr2_tfm_ctx {
	struct crypto_cipher *blockcipher;
	struct crypto_skcipher *xctr;
	struct crypto_shash *polyval;
	u8 L[BLOCKCIPHER_BLOCK_SIZE];
	int hashed_tweak_offset;
	/*
	 * This struct is allocated with extra space for two exported hash
	 * states.  Since the hash state size is not known at compile-time, we
	 * can't add these to the struct directly.
	 *
	 * hashed_tweaklen_divisible;
	 * hashed_tweaklen_remainder;
	 */
};

struct hctr2_request_ctx {
	u8 first_block[BLOCKCIPHER_BLOCK_SIZE];
	u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];
	struct scatterlist *bulk_part_dst;
	struct scatterlist *bulk_part_src;
	struct scatterlist sg_src[2];
	struct scatterlist sg_dst[2];

	/*
	 * Sub-request sizes are unknown at compile-time, so they need to go
	 * after the members with known sizes.  Only one of the sub-requests
	 * is needed at a time, so they share a union.
	 */
	union {
		struct shash_desc hash_desc;
		struct skcipher_request xctr_req;
	} u;

	/*
	 * This struct is allocated with extra space for one exported hash
	 * state.  Since the hash state size is not known at compile-time, we
	 * can't add it to the struct directly.
	 *
	 * hashed_tweak;
	 */
};

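/*
 * Putting the variable-size regions together, the runtime layouts are (a
 * sketch; the sizes become known only once the POLYVAL and XCTR algorithms
 * are resolved in hctr2_init_tfm()):
 *
 *	tfm ctx:	[ struct hctr2_tfm_ctx ]
 *			[ exported hash state: tweaklen, divisible case ]
 *			[ exported hash state: tweaklen, remainder case ]
 *
 *	request ctx:	[ fixed members of struct hctr2_request_ctx ]
 *			[ union u, padded to subreq_size ]
 *			[ exported hash state: hashed tweak ]  <- hashed_tweak_offset
 */
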
static inline u8 *hctr2_hashed_tweaklen(const struct hctr2_tfm_ctx *tctx,
					bool has_remainder)
{
	u8 *p = (u8 *)tctx + sizeof(*tctx);

	if (has_remainder) /* For messages not a multiple of block length */
		p += crypto_shash_statesize(tctx->polyval);
	return p;
}

static inline u8 *hctr2_hashed_tweak(const struct hctr2_tfm_ctx *tctx,
				     struct hctr2_request_ctx *rctx)
{
	return (u8 *)rctx + tctx->hashed_tweak_offset;
}

/*
 * The input data for each HCTR2 hash step begins with a 16-byte block that
 * contains the tweak length and a flag that indicates whether the input is
 * evenly divisible into blocks.  Since this implementation only supports one
 * tweak length, we precompute the two hash states resulting from hashing the
 * two possible values of this initial block.  This reduces by one block the
 * amount of data that needs to be hashed for each encryption/decryption.
 *
 * These precomputed hashes are stored in hctr2_tfm_ctx.
 */
static int hctr2_hash_tweaklen(struct hctr2_tfm_ctx *tctx, bool has_remainder)
{
	SHASH_DESC_ON_STACK(shash, tctx->polyval);
	__le64 tweak_length_block[2];
	int err;

	shash->tfm = tctx->polyval;
	memset(tweak_length_block, 0, sizeof(tweak_length_block));

	tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder);
	err = crypto_shash_init(shash);
	if (err)
		return err;
	err = crypto_shash_update(shash, (u8 *)tweak_length_block,
				  POLYVAL_BLOCK_SIZE);
	if (err)
		return err;
	return crypto_shash_export(shash, hctr2_hashed_tweaklen(tctx, has_remainder));
}

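/*
 * Worked example of the block hashed above, given TWEAK_SIZE == 32 as
 * defined in this file: the first 8 bytes encode, little-endian,
 *
 *	256 bits * 2 + 2 = 514	(message evenly divisible into blocks)
 *	256 bits * 2 + 3 = 515	(message with a partial final block)
 *
 * so only two initial hash states can ever occur, which is why both can be
 * precomputed at setkey time.
 */
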
static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 hbar[BLOCKCIPHER_BLOCK_SIZE];
	int err;

	crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->blockcipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->blockcipher, key, keylen);
	if (err)
		return err;

	crypto_skcipher_clear_flags(tctx->xctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->xctr,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->xctr, key, keylen);
	if (err)
		return err;

	/* hbar = E_K(bin(0)): the POLYVAL hash key */
	memset(hbar, 0, sizeof(hbar));
	crypto_cipher_encrypt_one(tctx->blockcipher, hbar, hbar);

	/* L = E_K(bin(1)); later XORed in when deriving the XCTR IV */
	memset(tctx->L, 0, sizeof(tctx->L));
	tctx->L[0] = 0x01;
	crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L);

	crypto_shash_clear_flags(tctx->polyval, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(tctx->polyval, crypto_skcipher_get_flags(tfm) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(tctx->polyval, hbar, BLOCKCIPHER_BLOCK_SIZE);
	if (err)
		return err;
	memzero_explicit(hbar, sizeof(hbar));

	return hctr2_hash_tweaklen(tctx, true) ?: hctr2_hash_tweaklen(tctx, false);
}

static int hctr2_hash_tweak(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	int err;
	bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE;

	hash_desc->tfm = tctx->polyval;
	err = crypto_shash_import(hash_desc, hctr2_hashed_tweaklen(tctx, has_remainder));
	if (err)
		return err;
	err = crypto_shash_update(hash_desc, req->iv, TWEAK_SIZE);
	if (err)
		return err;

	// Store the hashed tweak, since we need it when computing both
	// H(T || N) and H(T || V).
	return crypto_shash_export(hash_desc, hctr2_hashed_tweak(tctx, rctx));
}

static int hctr2_hash_message(struct skcipher_request *req,
			      struct scatterlist *sgl,
			      u8 digest[POLYVAL_DIGEST_SIZE])
{
	static const u8 padding[BLOCKCIPHER_BLOCK_SIZE] = { 0x1 };
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct sg_mapping_iter miter;
	unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE;
	int i;
	int err = 0;
	int n = 0;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	for (i = 0; i < bulk_len; i += n) {
		sg_miter_next(&miter);
		n = min_t(unsigned int, miter.length, bulk_len - i);
		err = crypto_shash_update(hash_desc, miter.addr, n);
		if (err)
			break;
	}
	sg_miter_stop(&miter);

	if (err)
		return err;

	if (remainder) {
		err = crypto_shash_update(hash_desc, padding,
					  BLOCKCIPHER_BLOCK_SIZE - remainder);
		if (err)
			return err;
	}
	return crypto_shash_final(hash_desc, digest);
}

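/*
 * Padding example (values chosen for illustration): for req->cryptlen = 50,
 * bulk_len = 34 and remainder = 2, so after hashing the 34 message bytes,
 * 14 bytes of "padding" (0x01 followed by zeroes) are hashed to complete the
 * final POLYVAL block.
 */
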
static int hctr2_finish(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	int err;

	// U = UU ^ H(T || V)
	// or M = MM ^ H(T || N)
	hash_desc->tfm = tctx->polyval;
	err = crypto_shash_import(hash_desc, hctr2_hashed_tweak(tctx, rctx));
	if (err)
		return err;
	err = hctr2_hash_message(req, rctx->bulk_part_dst, digest);
	if (err)
		return err;
	crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE);

	// Copy U (or M) into dst scatterlist
	scatterwalk_map_and_copy(rctx->first_block, req->dst,
				 0, BLOCKCIPHER_BLOCK_SIZE, 1);
	return 0;
}

static void hctr2_xctr_done(struct crypto_async_request *areq,
			    int err)
{
	struct skcipher_request *req = areq->data;

	if (!err)
		err = hctr2_finish(req);

	skcipher_request_complete(req, err);
}

static int hctr2_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	int err;

	// Requests must be at least one block
	if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
		return -EINVAL;

	// Copy M (or U) into a temporary buffer
	scatterwalk_map_and_copy(rctx->first_block, req->src,
				 0, BLOCKCIPHER_BLOCK_SIZE, 0);

	// Create scatterlists for N and V
	rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,
					       BLOCKCIPHER_BLOCK_SIZE);
	rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       BLOCKCIPHER_BLOCK_SIZE);

	// MM = M ^ H(T || N)
	// or UU = U ^ H(T || V)
	err = hctr2_hash_tweak(req);
	if (err)
		return err;
	err = hctr2_hash_message(req, rctx->bulk_part_src, digest);
	if (err)
		return err;
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);

	// UU = E(MM)
	// or MM = D(UU)
	if (enc)
		crypto_cipher_encrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);
	else
		crypto_cipher_decrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);

	// S = MM ^ UU ^ L
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
	crypto_xor_cpy(rctx->xctr_iv, digest, tctx->L, BLOCKCIPHER_BLOCK_SIZE);

	// V = XCTR(S, N)
	// or N = XCTR(S, V)
	skcipher_request_set_tfm(&rctx->u.xctr_req, tctx->xctr);
	skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,
				   rctx->bulk_part_dst, bulk_len,
				   rctx->xctr_iv);
	skcipher_request_set_callback(&rctx->u.xctr_req,
				      req->base.flags,
				      hctr2_xctr_done, req);
	return crypto_skcipher_encrypt(&rctx->u.xctr_req) ?:
		hctr2_finish(req);
}

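/*
 * Summary of the encryption dataflow implemented by hctr2_crypt() and
 * hctr2_finish(), in the notation of the HCTR2 paper (M = first 16-byte
 * block, N = remainder, T = tweak; decryption swaps M/U and N/V):
 *
 *	MM = M ^ H(T || N)
 *	UU = E(MM)
 *	S  = MM ^ UU ^ L
 *	V  = XCTR(S, N)
 *	U  = UU ^ H(T || V)
 *
 * The ciphertext is U || V.
 */
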
static int hctr2_encrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, true);
}

static int hctr2_decrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, false);
}

static int hctr2_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *xctr;
	struct crypto_cipher *blockcipher;
	struct crypto_shash *polyval;
	unsigned int subreq_size;
	int err;

	xctr = crypto_spawn_skcipher(&ictx->xctr_spawn);
	if (IS_ERR(xctr))
		return PTR_ERR(xctr);

	blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
	if (IS_ERR(blockcipher)) {
		err = PTR_ERR(blockcipher);
		goto err_free_xctr;
	}

	polyval = crypto_spawn_shash(&ictx->polyval_spawn);
	if (IS_ERR(polyval)) {
		err = PTR_ERR(polyval);
		goto err_free_blockcipher;
	}

	tctx->xctr = xctr;
	tctx->blockcipher = blockcipher;
	tctx->polyval = polyval;

	BUILD_BUG_ON(offsetofend(struct hctr2_request_ctx, u) !=
		     sizeof(struct hctr2_request_ctx));
	subreq_size = max(sizeof_field(struct hctr2_request_ctx, u.hash_desc) +
			  crypto_shash_descsize(polyval),
			  sizeof_field(struct hctr2_request_ctx, u.xctr_req) +
			  crypto_skcipher_reqsize(xctr));

	tctx->hashed_tweak_offset = offsetof(struct hctr2_request_ctx, u) +
				    subreq_size;
	crypto_skcipher_set_reqsize(tfm, tctx->hashed_tweak_offset +
				    crypto_shash_statesize(polyval));
	return 0;

err_free_blockcipher:
	crypto_free_cipher(blockcipher);
err_free_xctr:
	crypto_free_skcipher(xctr);
	return err;
}

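/*
 * Note on the sizing above: the hash descriptor and the XCTR sub-request are
 * never live at the same time, so the union only needs to be as large as the
 * bigger of the two.  The exported hashed tweak is deliberately placed
 * subreq_size bytes past the union, i.e. beyond both sub-requests, so that
 * neither phase can clobber it.
 */
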
static void hctr2_exit_tfm(struct crypto_skcipher *tfm)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(tctx->blockcipher);
	crypto_free_skcipher(tctx->xctr);
	crypto_free_shash(tctx->polyval);
}

static void hctr2_free_instance(struct skcipher_instance *inst)
{
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_cipher(&ictx->blockcipher_spawn);
	crypto_drop_skcipher(&ictx->xctr_spawn);
	crypto_drop_shash(&ictx->polyval_spawn);
	kfree(inst);
}

static int hctr2_create_common(struct crypto_template *tmpl,
			       struct rtattr **tb,
			       const char *xctr_name,
			       const char *polyval_name)
{
	u32 mask;
	struct skcipher_instance *inst;
	struct hctr2_instance_ctx *ictx;
	struct skcipher_alg *xctr_alg;
	struct crypto_alg *blockcipher_alg;
	struct shash_alg *polyval_alg;
	char blockcipher_name[CRYPTO_MAX_ALG_NAME];
	int len;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = skcipher_instance_ctx(inst);

	/* Stream cipher, xctr(block_cipher) */
	err = crypto_grab_skcipher(&ictx->xctr_spawn,
				   skcipher_crypto_instance(inst),
				   xctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	xctr_alg = crypto_spawn_skcipher_alg(&ictx->xctr_spawn);

	err = -EINVAL;
	if (strncmp(xctr_alg->base.cra_name, "xctr(", 5))
		goto err_free_inst;
	len = strscpy(blockcipher_name, xctr_alg->base.cra_name + 5,
		      sizeof(blockcipher_name));
	if (len < 1)
		goto err_free_inst;
	if (blockcipher_name[len - 1] != ')')
		goto err_free_inst;
	blockcipher_name[len - 1] = 0;

	/* Block cipher, e.g. "aes" */
	err = crypto_grab_cipher(&ictx->blockcipher_spawn,
				 skcipher_crypto_instance(inst),
				 blockcipher_name, 0, mask);
	if (err)
		goto err_free_inst;
	blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

	/* Require blocksize of 16 bytes */
	err = -EINVAL;
	if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
		goto err_free_inst;

	/* Polyval ε-∆U hash function */
	err = crypto_grab_shash(&ictx->polyval_spawn,
				skcipher_crypto_instance(inst),
				polyval_name, 0, mask);
	if (err)
		goto err_free_inst;
	polyval_alg = crypto_spawn_shash_alg(&ictx->polyval_spawn);

	/* Ensure Polyval is being used */
	err = -EINVAL;
	if (strcmp(polyval_alg->base.cra_name, "polyval") != 0)
		goto err_free_inst;

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, "hctr2(%s)",
		     blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "hctr2_base(%s,%s)",
		     xctr_alg->base.cra_driver_name,
		     polyval_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
	inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx) +
				     polyval_alg->statesize * 2;
	inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask |
				       polyval_alg->base.cra_alignmask;
	/*
	 * The hash function is called twice, so it is weighted higher than the
	 * xctr and blockcipher.
	 */
	inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority +
				       4 * polyval_alg->base.cra_priority +
				       blockcipher_alg->cra_priority) / 7;

	inst->alg.setkey = hctr2_setkey;
	inst->alg.encrypt = hctr2_encrypt;
	inst->alg.decrypt = hctr2_decrypt;
	inst->alg.init = hctr2_init_tfm;
	inst->alg.exit = hctr2_exit_tfm;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(xctr_alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(xctr_alg);
	inst->alg.ivsize = TWEAK_SIZE;

	inst->free = hctr2_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		hctr2_free_instance(inst);
	}
	return err;
}

static int hctr2_create_base(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *xctr_name;
	const char *polyval_name;

	xctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(xctr_name))
		return PTR_ERR(xctr_name);

	polyval_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(polyval_name))
		return PTR_ERR(polyval_name);

	return hctr2_create_common(tmpl, tb, xctr_name, polyval_name);
}

static int hctr2_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *blockcipher_name;
	char xctr_name[CRYPTO_MAX_ALG_NAME];

	blockcipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(blockcipher_name))
		return PTR_ERR(blockcipher_name);

	if (snprintf(xctr_name, CRYPTO_MAX_ALG_NAME, "xctr(%s)",
		     blockcipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return hctr2_create_common(tmpl, tb, xctr_name, "polyval");
}

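/*
 * Example instance names produced by the two templates (the driver names
 * depend on which implementations are selected on a given machine, so these
 * are only illustrative):
 *
 *	"hctr2(aes)"
 *		-> e.g. "hctr2_base(xctr(aes-generic),polyval-generic)"
 *	"hctr2_base(xctr-aes-aesni,polyval-clmulni)"
 *		-> an accelerated instance still named "hctr2(aes)"
 */
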
static struct crypto_template hctr2_tmpls[] = {
	{
		/* hctr2_base(xctr_name, polyval_name) */
		.name = "hctr2_base",
		.create = hctr2_create_base,
		.module = THIS_MODULE,
	}, {
		/* hctr2(blockcipher_name) */
		.name = "hctr2",
		.create = hctr2_create,
		.module = THIS_MODULE,
	}
};

static int __init hctr2_module_init(void)
{
	return crypto_register_templates(hctr2_tmpls, ARRAY_SIZE(hctr2_tmpls));
}

static void __exit hctr2_module_exit(void)
{
	return crypto_unregister_templates(hctr2_tmpls,
					   ARRAY_SIZE(hctr2_tmpls));
}

subsys_initcall(hctr2_module_init);
module_exit(hctr2_module_exit);

MODULE_DESCRIPTION("HCTR2 length-preserving encryption mode");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("hctr2");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);