struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
struct rk_ahash_ctx *tctx = crypto_ahash_ctx(tfm);
+ struct rk_crypto_info *rkc = tctx->dev;
int ret;
- ret = dma_map_sg(tctx->dev->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE);
+ ret = dma_map_sg(rkc->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE);
if (ret <= 0)
return -EINVAL;
struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
struct rk_ahash_ctx *tctx = crypto_ahash_ctx(tfm);
+ struct rk_crypto_info *rkc = tctx->dev;
- dma_unmap_sg(tctx->dev->dev, areq->src, rctx->nrsg, DMA_TO_DEVICE);
+ dma_unmap_sg(rkc->dev, areq->src, rctx->nrsg, DMA_TO_DEVICE);
return 0;
}
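
The two hunks above pair a map with its unmap. A minimal sketch of that contract, under a hypothetical helper name but using this driver's own fields: dma_map_sg() returns the number of entries actually mapped (0 on failure), and exactly that count, saved here in rctx->nrsg, must later be handed back to dma_unmap_sg().

/* Sketch only; in the driver this lives next to the hunks above
 * and needs <linux/dma-mapping.h> and <linux/scatterlist.h>. */
static int rk_hash_map_src(struct rk_crypto_info *rkc,
			   struct ahash_request *areq,
			   struct rk_ahash_rctx *rctx)
{
	int nents;

	/* dma_map_sg() may coalesce entries; it returns the mapped count */
	nents = dma_map_sg(rkc->dev, areq->src, sg_nents(areq->src),
			   DMA_TO_DEVICE);
	if (nents <= 0)
		return -EINVAL;
	rctx->nrsg = nents;	/* the unmap path must reuse this count */
	return 0;
}
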
struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.hash);
struct scatterlist *sg = areq->src;
+ struct rk_crypto_info *rkc = tctx->dev;
int err = 0;
int i;
u32 v;
rk_ahash_reg_init(areq);
while (sg) {
- reinit_completion(&tctx->dev->complete);
- tctx->dev->status = 0;
- crypto_ahash_dma_start(tctx->dev, sg);
- wait_for_completion_interruptible_timeout(&tctx->dev->complete,
+ reinit_completion(&rkc->complete);
+ rkc->status = 0;
+ crypto_ahash_dma_start(rkc, sg);
+ wait_for_completion_interruptible_timeout(&rkc->complete,
msecs_to_jiffies(2000));
- if (!tctx->dev->status) {
- dev_err(tctx->dev->dev, "DMA timeout\n");
+ if (!rkc->status) {
+ dev_err(rkc->dev, "DMA timeout\n");
err = -EFAULT;
goto theend;
}
* efficiency, and make it respond quickly when the DMA
* completes.
*/
- readl_poll_timeout(tctx->dev->reg + RK_CRYPTO_HASH_STS, v, v == 0, 10, 1000);
+ readl_poll_timeout(rkc->reg + RK_CRYPTO_HASH_STS, v, v == 0, 10, 1000);
for (i = 0; i < crypto_ahash_digestsize(tfm) / 4; i++) {
- v = readl(tctx->dev->reg + RK_CRYPTO_HASH_DOUT_0 + i * 4);
+ v = readl(rkc->reg + RK_CRYPTO_HASH_DOUT_0 + i * 4);
put_unaligned_le32(v, areq->result + i * 4);
}
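
The hash loop re-arms rkc->complete, kicks off DMA, then waits up to two seconds; a driver-owned status flag is what distinguishes a real completion interrupt from a timeout. A rough sketch of the interrupt-side counterpart that the !rkc->status test relies on (hypothetical, not part of this hunk):

/* Sketch of the IRQ handler assumed by the wait above: it sets the
 * status flag and wakes the waiter; the hardware ack is elided. */
static irqreturn_t rk_crypto_irq_handle(int irq, void *dev_id)
{
	struct rk_crypto_info *rkc = dev_id;

	/* device-specific interrupt acknowledgement goes here */
	rkc->status = 1;		/* seen by the waiter as "DMA done" */
	complete(&rkc->complete);	/* wake the sleeping request path */
	return IRQ_HANDLED;
}
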
unsigned int todo;
struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
+ struct rk_crypto_info *rkc = ctx->dev;
algt->stat_req++;
scatterwalk_map_and_copy(biv, sgs, offset, ivsize, 0);
}
if (sgs == sgd) {
- err = dma_map_sg(ctx->dev->dev, sgs, 1, DMA_BIDIRECTIONAL);
+ err = dma_map_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
if (err <= 0) {
err = -EINVAL;
goto theend_iv;
}
} else {
- err = dma_map_sg(ctx->dev->dev, sgs, 1, DMA_TO_DEVICE);
+ err = dma_map_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
if (err <= 0) {
err = -EINVAL;
goto theend_iv;
}
- err = dma_map_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE);
+ err = dma_map_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
if (err <= 0) {
err = -EINVAL;
goto theend_sgs;
}
}
err = 0;
- rk_cipher_hw_init(ctx->dev, areq);
+ rk_cipher_hw_init(rkc, areq);
if (ivsize) {
if (ivsize == DES_BLOCK_SIZE)
- memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_IV_0, ivtouse, ivsize);
+ memcpy_toio(rkc->reg + RK_CRYPTO_TDES_IV_0, ivtouse, ivsize);
else
- memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_IV_0, ivtouse, ivsize);
+ memcpy_toio(rkc->reg + RK_CRYPTO_AES_IV_0, ivtouse, ivsize);
}
- reinit_completion(&ctx->dev->complete);
- ctx->dev->status = 0;
+ reinit_completion(&rkc->complete);
+ rkc->status = 0;
todo = min(sg_dma_len(sgs), len);
len -= todo;
- crypto_dma_start(ctx->dev, sgs, sgd, todo / 4);
- wait_for_completion_interruptible_timeout(&ctx->dev->complete,
+ crypto_dma_start(rkc, sgs, sgd, todo / 4);
+ wait_for_completion_interruptible_timeout(&rkc->complete,
msecs_to_jiffies(2000));
- if (!ctx->dev->status) {
- dev_err(ctx->dev->dev, "DMA timeout\n");
+ if (!rkc->status) {
+ dev_err(rkc->dev, "DMA timeout\n");
err = -EFAULT;
goto theend;
}
if (sgs == sgd) {
- dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_BIDIRECTIONAL);
+ dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
} else {
- dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_TO_DEVICE);
- dma_unmap_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE);
+ dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
+ dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
}
if (rctx->mode & RK_CRYPTO_DEC) {
memcpy(iv, biv, ivsize);
theend_sgs:
if (sgs == sgd) {
- dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_BIDIRECTIONAL);
+ dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
} else {
- dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_TO_DEVICE);
- dma_unmap_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE);
+ dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
+ dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
}
theend_iv:
return err;
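
Beyond the rkc renaming, the skcipher path encodes one mapping rule worth keeping in mind: in-place requests (sgs == sgd) map a single scatterlist bidirectionally, while out-of-place requests map each side with its own direction, and every exit path (theend, theend_sgs) must unmap with the same directions. A sketch of that rule as a hypothetical helper, not the driver's actual code:

/* Sketch only; needs <linux/dma-mapping.h>. Mirrors the in-place vs
 * out-of-place branches in the hunks above. */
static int rk_map_one(struct device *dev, struct scatterlist *sgs,
		      struct scatterlist *sgd)
{
	if (sgs == sgd)
		/* in-place: one mapping, device reads and writes it */
		return dma_map_sg(dev, sgs, 1, DMA_BIDIRECTIONAL) > 0 ?
		       0 : -EINVAL;

	/* out-of-place: source is read-only, destination write-only */
	if (dma_map_sg(dev, sgs, 1, DMA_TO_DEVICE) <= 0)
		return -EINVAL;
	if (dma_map_sg(dev, sgd, 1, DMA_FROM_DEVICE) <= 0) {
		/* undo the half-done mapping with its own direction */
		dma_unmap_sg(dev, sgs, 1, DMA_TO_DEVICE);
		return -EINVAL;
	}
	return 0;
}
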