crypto: arm64/aes-ccm - Rewrite skcipher walker loop

An often overlooked aspect of the skcipher walker API is that an
error is not just indicated by a non-zero return value, but by the
fact that walk->nbytes is zero.

Thus it is an error to call skcipher_walk_done after getting back
walk->nbytes == 0 from the previous interaction with the walker.

This is because when walk->nbytes is zero the walker is left in
an undefined state and any further calls to it may try to free
uninitialised stack memory.
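
To make the convention concrete, here is a minimal sketch of the usual
walker loop.  The function name demo_walk_loop() is purely illustrative
and memcpy() stands in for a real cipher step, but the walker calls are
the real API: skcipher_walk_done() is only ever reached while
walk->nbytes is non-zero, so a zero value (error or end of data) falls
straight out of the loop.

	#include <crypto/internal/skcipher.h>
	#include <linux/string.h>

	/* Illustrative only: "process" each chunk by copying it, to keep
	 * the focus on the walker calls rather than on a real cipher. */
	static int demo_walk_loop(struct skcipher_walk *walk)
	{
		int err = 0;

		/* walk->nbytes == 0 signals an error or the end of the
		 * data; either way the walker must not be called again. */
		while (walk->nbytes) {
			unsigned int chunk = walk->nbytes;

			memcpy(walk->dst.virt.addr, walk->src.virt.addr, chunk);

			/* second argument: bytes left unprocessed this step */
			err = skcipher_walk_done(walk, walk->nbytes - chunk);
		}

		return err;
	}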

The arm64 ccm code has to deal with zero-length messages, and
it needs to process data even when walk->nbytes == 0 is returned.
It doesn't have this bug because there is an explicit check for
walk->nbytes != 0 prior to the skcipher_walk_done call.
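
Condensed from the pre-patch loop in the diff below, this is how the
zero-length case plays out: walk.total and walk.nbytes are both zero,
the do/while body still runs once so the final tag is computed, and the
explicit walk.nbytes guard keeps skcipher_walk_done() out of that path
(error handling abridged here):

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;	/* 0 */

		if (walk.nbytes == walk.total)			/* 0 == 0 */
			tail = 0;

		/* the bulk cipher call is a no-op for zero bytes */

		if (walk.nbytes == walk.total)
			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

		kernel_neon_end();

		if (walk.nbytes)	/* false: skcipher_walk_done() is never reached */
			err = skcipher_walk_done(&walk, tail);
	} while (walk.nbytes);		/* false: the body runs exactly once */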

However, the loop is still sufficiently different from the usual
layout that it appears to have been copied into other code which
then ended up with this bug.  This patch rewrites it to follow the
usual convention of checking walk->nbytes.
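
Distilled from the diff below, the rewritten loop has the conventional
shape: the body only runs while walk.nbytes is non-zero, the NEON
context is dropped around skcipher_walk_done() on non-final steps, and
the final-tag computation moves after the loop, so an empty message
never enters the loop at all:

	while (walk.nbytes) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		bool final = walk.nbytes == walk.total;

		if (final)
			tail = 0;

		/* bulk encryption/decryption of walk.nbytes - tail bytes */

		if (!final)
			kernel_neon_end();
		err = skcipher_walk_done(&walk, tail);
		if (!final)
			kernel_neon_begin();
	}

	ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
	kernel_neon_end();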

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Tested-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Herbert Xu 2023-01-30 16:58:51 +08:00
Parent: 808d065ad7
Commit: 57ead1bf1c
1 changed file with 26 additions and 31 deletions


@@ -161,43 +161,39 @@ static int ccm_encrypt(struct aead_request *req)
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
 	err = skcipher_walk_aead_encrypt(&walk, req, false);
-	if (unlikely(err))
-		return err;
 
 	kernel_neon_begin();
 
 	if (req->assoclen)
 		ccm_calculate_auth_mac(req, mac);
 
-	do {
+	while (walk.nbytes) {
 		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
+		bool final = walk.nbytes == walk.total;
 
-		if (walk.nbytes == walk.total)
+		if (final)
 			tail = 0;
 
 		ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   walk.nbytes - tail, ctx->key_enc,
 				   num_rounds(ctx), mac, walk.iv);
 
-		if (walk.nbytes == walk.total)
-			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
+		if (!final)
+			kernel_neon_end();
+		err = skcipher_walk_done(&walk, tail);
+		if (!final)
+			kernel_neon_begin();
+	}
 
-		kernel_neon_end();
+	ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
 
-		if (walk.nbytes) {
-			err = skcipher_walk_done(&walk, tail);
-			if (unlikely(err))
-				return err;
-			if (unlikely(walk.nbytes))
-				kernel_neon_begin();
-		}
-	} while (walk.nbytes);
+	kernel_neon_end();
 
 	/* copy authtag to end of dst */
 	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
 			       crypto_aead_authsize(aead), 1);
 
-	return 0;
+	return err;
 }
 
 static int ccm_decrypt(struct aead_request *req)
@@ -219,37 +215,36 @@ static int ccm_decrypt(struct aead_request *req)
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
 	err = skcipher_walk_aead_decrypt(&walk, req, false);
-	if (unlikely(err))
-		return err;
 
 	kernel_neon_begin();
 
 	if (req->assoclen)
 		ccm_calculate_auth_mac(req, mac);
 
-	do {
+	while (walk.nbytes) {
 		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
+		bool final = walk.nbytes == walk.total;
 
-		if (walk.nbytes == walk.total)
+		if (final)
 			tail = 0;
 
 		ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   walk.nbytes - tail, ctx->key_enc,
 				   num_rounds(ctx), mac, walk.iv);
 
-		if (walk.nbytes == walk.total)
-			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
+		if (!final)
+			kernel_neon_end();
+		err = skcipher_walk_done(&walk, tail);
+		if (!final)
+			kernel_neon_begin();
+	}
 
-		kernel_neon_end();
+	ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
 
-		if (walk.nbytes) {
-			err = skcipher_walk_done(&walk, tail);
-			if (unlikely(err))
-				return err;
-			if (unlikely(walk.nbytes))
-				kernel_neon_begin();
-		}
-	} while (walk.nbytes);
+	kernel_neon_end();
+
+	if (unlikely(err))
+		return err;
 
 	/* compare calculated auth tag with the stored one */
 	scatterwalk_map_and_copy(buf, req->src,