unsigned int ivsize = crypto_skcipher_ivsize(tfm);
struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
u32 mode = ctx->mode;
- void *backup_iv = NULL;
/* when activating SS, the default FIFO space is SS_RX_DEFAULT(32) */
u32 rx_cnt = SS_RX_DEFAULT;
u32 tx_cnt = 0;
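Both functions in this patch drop the heap-allocated void *backup_iv in favor of a buffer embedded in the per-request context, which the later hunks reference as ctx->backup_iv. The matching header change is not part of this excerpt; a minimal sketch of what it presumably adds to struct sun4i_cipher_req_ctx (the AES_BLOCK_SIZE sizing and the omission of other members are assumptions):

struct sun4i_cipher_req_ctx {
        u32 mode;
        /* assumed: fixed-size backup buffer, must hold at least ivsize bytes */
        u8 backup_iv[AES_BLOCK_SIZE];
        /* ... remaining members of the request context unchanged ... */
};

The hunks below then drop the per-request allocation and copy straight into that buffer.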
[...]
}

if (areq->iv && ivsize > 0 && mode & SS_DECRYPTION) {
- backup_iv = kzalloc(ivsize, GFP_KERNEL);
- if (!backup_iv)
- return -ENOMEM;
- scatterwalk_map_and_copy(backup_iv, areq->src, areq->cryptlen - ivsize, ivsize, 0);
+ scatterwalk_map_and_copy(ctx->backup_iv, areq->src,
+ areq->cryptlen - ivsize, ivsize, 0);
}
if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN4I_SS_DEBUG)) {
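For reference, scatterwalk_map_and_copy() copies between a linear buffer and a scatterlist, with the last argument selecting the direction: 0 copies out of the scatterlist into the buffer, non-zero copies the buffer into the scatterlist. The new call above therefore snapshots the final IV-sized block of the source (the last ciphertext block) into the embedded buffer before the engine runs. The same call, annotated (a restatement of the hunk above, not additional driver code):

scatterwalk_map_and_copy(ctx->backup_iv,          /* destination: linear buffer      */
                         areq->src,               /* source scatterlist              */
                         areq->cryptlen - ivsize, /* offset of the last block        */
                         ivsize,                  /* number of bytes to copy         */
                         0);                      /* direction 0: scatterlist -> buf */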
[...]
if (areq->iv) {
if (mode & SS_DECRYPTION) {
- memcpy(areq->iv, backup_iv, ivsize);
- kfree_sensitive(backup_iv);
+ memcpy(areq->iv, ctx->backup_iv, ivsize);
+ memzero_explicit(ctx->backup_iv, ivsize);
} else {
scatterwalk_map_and_copy(areq->iv, areq->dst, areq->cryptlen - ivsize,
ivsize, 0);
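Since the backup now lives in the request context there is no allocation to free; memzero_explicit() keeps only the wiping half of what kfree_sensitive() used to do (memzero_explicit() is the kernel's helper for clearing sensitive data without the compiler eliding the store). Roughly, and only for comparison, kfree_sensitive() behaves like:

/* Rough sketch of kfree_sensitive() (formerly kzfree()), for comparison only. */
void kfree_sensitive_sketch(const void *p)
{
        size_t ks = ksize(p);

        if (ks)
                memzero_explicit((void *)p, ks);  /* wipe the allocation ... */
        kfree(p);                                 /* ... then free it */
}

The hunks that follow make the same conversion in the driver's scatter/gather path, the function that can defer to sun4i_ss_cipher_poll_fallback().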
[...]
unsigned int ileft = areq->cryptlen;
unsigned int oleft = areq->cryptlen;
unsigned int todo;
- void *backup_iv = NULL;
struct sg_mapping_iter mi, mo;
unsigned long pi = 0, po = 0; /* progress for in and out */
bool miter_err;
[...]
return sun4i_ss_cipher_poll_fallback(areq);

if (areq->iv && ivsize > 0 && mode & SS_DECRYPTION) {
- backup_iv = kzalloc(ivsize, GFP_KERNEL);
- if (!backup_iv)
- return -ENOMEM;
- scatterwalk_map_and_copy(backup_iv, areq->src, areq->cryptlen - ivsize, ivsize, 0);
+ scatterwalk_map_and_copy(ctx->backup_iv, areq->src,
+ areq->cryptlen - ivsize, ivsize, 0);
}
if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN4I_SS_DEBUG)) {
[...]
}
if (areq->iv) {
if (mode & SS_DECRYPTION) {
- memcpy(areq->iv, backup_iv, ivsize);
- kfree_sensitive(backup_iv);
+ memcpy(areq->iv, ctx->backup_iv, ivsize);
+ memzero_explicit(ctx->backup_iv, ivsize);
} else {
scatterwalk_map_and_copy(areq->iv, areq->dst, areq->cryptlen - ivsize,
ivsize, 0);
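Taken together, the post-patch IV handling in both paths can be read as the two steps sketched below. This is a hypothetical consolidation for illustration only; the helper names are invented and the driver keeps the logic inline as shown in the hunks. The point of the early backup is that a decryption request may run in place (src == dst), so the last ciphertext block, which the skcipher API expects back in areq->iv as the chaining IV, would otherwise already be overwritten with plaintext by the time the request completes.

/* Hypothetical helpers restating the post-patch IV handling (illustration only). */

static void backup_iv_before_op(struct skcipher_request *areq,
                                struct sun4i_cipher_req_ctx *ctx,
                                unsigned int ivsize, u32 mode)
{
        /* Decryption: snapshot the last ciphertext block before the engine runs. */
        if (areq->iv && ivsize > 0 && (mode & SS_DECRYPTION))
                scatterwalk_map_and_copy(ctx->backup_iv, areq->src,
                                         areq->cryptlen - ivsize, ivsize, 0);
}

static void write_back_iv_after_op(struct skcipher_request *areq,
                                   struct sun4i_cipher_req_ctx *ctx,
                                   unsigned int ivsize, u32 mode)
{
        if (!areq->iv || !ivsize)
                return;

        if (mode & SS_DECRYPTION) {
                /* Return the saved ciphertext block as the chaining IV, then wipe the copy. */
                memcpy(areq->iv, ctx->backup_iv, ivsize);
                memzero_explicit(ctx->backup_iv, ivsize);
        } else {
                /* Encryption: the chaining IV is the last block written to dst. */
                scatterwalk_map_and_copy(areq->iv, areq->dst,
                                         areq->cryptlen - ivsize, ivsize, 0);
        }
}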