// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);

static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}

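/*
 * The AEGIS-128 state update: each of the five 16-byte state blocks is
 * replaced by one AES round of its predecessor, keyed with the block's
 * old value (the last block feeds back into the first). Callers then
 * XOR a message block into state block 0, per the AEGIS specification.
 */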
static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

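/*
 * State initialization as in the AEGIS-128 spec: the five blocks are
 * seeded from key ^ nonce, the two Fibonacci-derived constants, and the
 * key XORed with each constant, then mixed with ten update rounds that
 * alternate the key and key ^ nonce as message input.
 */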
static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}

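/*
 * Absorb a run of full associated-data blocks into the state. The
 * aligned path feeds whole blocks directly; the unaligned path goes
 * through the byte-wise XOR helper. Any sub-block tail is left for the
 * caller to buffer.
 */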
static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size,
			       bool do_simd)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}

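/*
 * Each keystream block is S1 ^ S4 ^ (S2 & S3), accumulated below in
 * 'tmp' starting from state block 2. Encryption XORs the plaintext
 * with the keystream and absorbs the plaintext into the state;
 * decryption absorbs the recovered plaintext instead, which is why the
 * two chunk functions differ only in what they feed to the update.
 */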
static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};

		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}

static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};

		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}

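/*
 * Walk the associated-data scatterlist and feed it to the state block
 * by block, buffering partial blocks in 'buf' across scatterlist
 * entries and zero-padding the final partial block.
 */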
static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;

				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);

				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);
		pos += left;
		assoclen -= size;

		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}

static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst, const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}

	return err;
}

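/*
 * Finalization per the spec: a block encoding the AD and message
 * lengths in bits (little endian) is XORed with state block 3 and
 * absorbed for seven rounds; the tag is the XOR of all five state
 * blocks. On decryption the caller passes the expected tag in
 * 'tag_xor', so a correct tag XORs down to all-zero bytes.
 */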
static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 * wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}

static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}

static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};

static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};

static int __init crypto_aegis128_module_init(void)
{
	int ret;

	ret = crypto_register_aead(&crypto_aegis128_alg_generic);
	if (ret)
		return ret;

	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd()) {
		ret = crypto_register_aead(&crypto_aegis128_alg_simd);
		if (ret) {
			crypto_unregister_aead(&crypto_aegis128_alg_generic);
			return ret;
		}

		static_branch_enable(&have_simd);
	}
	return 0;
}

static void __exit crypto_aegis128_module_exit(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		crypto_unregister_aead(&crypto_aegis128_alg_simd);

	crypto_unregister_aead(&crypto_aegis128_alg_generic);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");
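
/*
 * A minimal sketch of driving this transform through the in-kernel AEAD
 * API. Error handling and buffer allocation are elided, and the
 * single-scatterlist layout (AD || plaintext || room for the tag) is an
 * illustrative assumption, not the only valid request layout:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("aegis128", 0, 0);
 *	struct aead_request *req;
 *	struct scatterlist sg;
 *	u8 iv[AEGIS128_NONCE_SIZE];
 *
 *	crypto_aead_setkey(tfm, key, AEGIS128_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, AEGIS128_MAX_AUTH_SIZE);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	// buf holds assoclen bytes of AD followed by ptlen bytes of
 *	// plaintext, with 16 spare bytes for the appended tag
 *	sg_init_one(&sg, buf, assoclen + ptlen + AEGIS128_MAX_AUTH_SIZE);
 *	aead_request_set_callback(req, 0, NULL, NULL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *	crypto_aead_encrypt(req);
 */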