From 0f533e67d26f228ea5dfdacc8a4bdeb487af5208 Mon Sep 17 00:00:00 2001
From: Eric Biggers
Date: Thu, 31 Jan 2019 23:51:36 -0800
Subject: [PATCH] crypto: aegis - fix handling chunked inputs

The generic AEGIS implementations all fail the improved AEAD tests
because they produce the wrong result with some data layouts.  The issue
is that they assume that if the skcipher_walk API gives 'nbytes' not
aligned to the walksize (a.k.a. walk.stride), then it is the end of the
data.  In fact, this can happen before the end.  Fix them.

Fixes: f606a88e5823 ("crypto: aegis - Add generic AEGIS AEAD implementations")
Cc: <stable@vger.kernel.org> # v4.18+
Cc: Ondrej Mosnacek
Signed-off-by: Eric Biggers
Reviewed-by: Ondrej Mosnacek
Signed-off-by: Herbert Xu
---
 crypto/aegis128.c  | 14 +++++++-------
 crypto/aegis128l.c | 14 +++++++-------
 crypto/aegis256.c  | 14 +++++++-------
 3 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/crypto/aegis128.c b/crypto/aegis128.c
index 96e078a..3718a83 100644
--- a/crypto/aegis128.c
+++ b/crypto/aegis128.c
@@ -286,19 +286,19 @@ static void crypto_aegis128_process_crypt(struct aegis_state *state,
 					  const struct aegis128_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
diff --git a/crypto/aegis128l.c b/crypto/aegis128l.c
index a210e77..275a861 100644
--- a/crypto/aegis128l.c
+++ b/crypto/aegis128l.c
@@ -349,19 +349,19 @@ static void crypto_aegis128l_process_crypt(struct aegis_state *state,
 					   const struct aegis128l_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
diff --git a/crypto/aegis256.c b/crypto/aegis256.c
index 49882a2..ecd6b7f 100644
--- a/crypto/aegis256.c
+++ b/crypto/aegis256.c
@@ -299,19 +299,19 @@ static void crypto_aegis256_process_crypt(struct aegis_state *state,
 					  const struct aegis256_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
-- 
2.7.4
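
For reference, the pattern that all three hunks converge on can be read on its
own as the following sketch.  process_crypt() and process_chunk() are
illustrative names standing in for the per-algorithm
crypto_aegisNNN_process_crypt() helpers and their ops->crypt_chunk() callbacks;
they are not symbols introduced by the patch.

	/* Sketch only: the chunked skcipher_walk handling this patch applies. */
	#include <crypto/internal/skcipher.h>
	#include <linux/kernel.h>

	static void process_crypt(void *state, struct skcipher_walk *walk,
				  void (*process_chunk)(void *state, u8 *dst,
							const u8 *src,
							unsigned int size))
	{
		while (walk->nbytes) {
			unsigned int nbytes = walk->nbytes;

			/*
			 * The walk may hand back a step that is not a multiple
			 * of the stride even before the end of the data; only
			 * the final step may be short, so round intermediate
			 * steps down to a whole number of strides.
			 */
			if (nbytes < walk->total)
				nbytes = round_down(nbytes, walk->stride);

			process_chunk(state, walk->dst.virt.addr,
				      walk->src.virt.addr, nbytes);

			/* Return the unprocessed tail to the walk. */
			skcipher_walk_done(walk, walk->nbytes - nbytes);
		}
	}

Passing walk->nbytes - nbytes to skcipher_walk_done() is what lets the walk
re-present the unprocessed remainder in the next iteration instead of it being
silently dropped, which was the bug in the original loop.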