The crypto_info to use must be stored in the request context. This will
help when two crypto_info instances become available on the rk3399.

Signed-off-by: Corentin Labbe <clabbe@baylibre.com>
---
 drivers/crypto/rockchip/rk3288_crypto.h          | 2 ++
 drivers/crypto/rockchip/rk3288_crypto_ahash.c    | 6 ++++--
 drivers/crypto/rockchip/rk3288_crypto_skcipher.c | 6 ++++--
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/drivers/crypto/rockchip/rk3288_crypto.h b/drivers/crypto/rockchip/rk3288_crypto.h
--- a/drivers/crypto/rockchip/rk3288_crypto.h
+++ b/drivers/crypto/rockchip/rk3288_crypto.h
@@ -215,6 +215,7 @@ struct rk_ahash_ctx {
/* the private variable of hash for fallback */
struct rk_ahash_rctx {
+ struct rk_crypto_info *dev;
struct ahash_request fallback_req;
u32 mode;
int nrsg;
@@ -231,6 +232,7 @@ struct rk_cipher_ctx {
};
struct rk_cipher_rctx {
+ struct rk_crypto_info *dev;
u8 backup_iv[AES_BLOCK_SIZE];
u32 mode;
struct skcipher_request fallback_req; // keep at the end
diff --git a/drivers/crypto/rockchip/rk3288_crypto_ahash.c b/drivers/crypto/rockchip/rk3288_crypto_ahash.c
--- a/drivers/crypto/rockchip/rk3288_crypto_ahash.c
+++ b/drivers/crypto/rockchip/rk3288_crypto_ahash.c
@@ -200,6 +200,7 @@ static int rk_ahash_export(struct ahash_request *req, void *out)
static int rk_ahash_digest(struct ahash_request *req)
{
+ struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
struct rk_ahash_ctx *tctx = crypto_tfm_ctx(req->base.tfm);
struct rk_crypto_info *dev = tctx->main;
@@ -209,6 +210,8 @@ static int rk_ahash_digest(struct ahash_request *req)
if (!req->nbytes)
return zero_message_process(req);
+ rctx->dev = dev;
+
return crypto_transfer_hash_request_to_engine(dev->engine, req);
}
@@ -256,11 +259,10 @@ static int rk_hash_run(struct crypto_engine *engine, void *breq)
struct ahash_request *areq = container_of(breq, struct ahash_request, base);
struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
- struct rk_ahash_ctx *tctx = crypto_ahash_ctx(tfm);
struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.hash);
struct scatterlist *sg = areq->src;
- struct rk_crypto_info *rkc = tctx->main;
+ struct rk_crypto_info *rkc = rctx->dev;
int err = 0;
int i;
u32 v;
diff --git a/drivers/crypto/rockchip/rk3288_crypto_skcipher.c b/drivers/crypto/rockchip/rk3288_crypto_skcipher.c
--- a/drivers/crypto/rockchip/rk3288_crypto_skcipher.c
+++ b/drivers/crypto/rockchip/rk3288_crypto_skcipher.c
@@ -96,12 +96,15 @@ static int rk_cipher_handle_req(struct skcipher_request *req)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct rk_cipher_ctx *tctx = crypto_skcipher_ctx(tfm);
+ struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
struct rk_crypto_info *rkc = tctx->main;
struct crypto_engine *engine = rkc->engine;
if (rk_cipher_need_fallback(req))
return rk_cipher_fallback(req);
+ rctx->dev = rkc;
+
return crypto_transfer_skcipher_request_to_engine(engine, req);
}
@@ -300,7 +303,6 @@ static int rk_cipher_run(struct crypto_engine *engine, void *async_req)
{
struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
- struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
struct scatterlist *sgs, *sgd;
int err = 0;
@@ -314,7 +316,7 @@ static int rk_cipher_run(struct crypto_engine *engine, void *async_req)
unsigned int todo;
struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
- struct rk_crypto_info *rkc = ctx->main;
+ struct rk_crypto_info *rkc = rctx->dev;
algt->stat_req++;
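For context, a minimal sketch (not part of this patch) of how the stored
pointer could be used once two rk_crypto_info instances exist on rk3399:
the handler picks an instance, records it in the request context, and the
engine callback later reads it back via rctx->dev. The selector
rk_get_crypto_info() below is a hypothetical helper used only for
illustration.

static int rk_cipher_handle_req(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
	struct rk_crypto_info *rkc;

	if (rk_cipher_need_fallback(req))
		return rk_cipher_fallback(req);

	/* Hypothetical: pick one of the available instances (e.g. round-robin). */
	rkc = rk_get_crypto_info();
	rctx->dev = rkc;

	return crypto_transfer_skcipher_request_to_engine(rkc->engine, req);
}

With the instance captured at queueing time, the run handlers no longer
have to reach back into the tfm context, which is what the rk_hash_run()
and rk_cipher_run() hunks above switch to.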