@@ -73,7 +73,6 @@ static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
int err;
- SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, op->fallback_tfm);
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
struct sun8i_ss_alg_template *algt;
@@ -81,15 +80,15 @@ static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
algt->stat_fb++;
#endif
- skcipher_request_set_sync_tfm(subreq, op->fallback_tfm);
- skcipher_request_set_callback(subreq, areq->base.flags, NULL, NULL);
- skcipher_request_set_crypt(subreq, areq->src, areq->dst,
+ skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
+ skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
+ areq->base.complete, areq->base.data);
+ skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
areq->cryptlen, areq->iv);
if (rctx->op_dir & SS_DECRYPTION)
- err = crypto_skcipher_decrypt(subreq);
+ err = crypto_skcipher_decrypt(&rctx->fallback_req);
else
- err = crypto_skcipher_encrypt(subreq);
- skcipher_request_zero(subreq);
+ err = crypto_skcipher_encrypt(&rctx->fallback_req);
return err;
}
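
The two hunks above move the fallback off the stack: an asynchronous fallback skcipher may return -EINPROGRESS and finish later, so its request must outlive this function, which is why it now lives inside the driver's per-request context and inherits the caller's completion callback. A minimal sketch of that dispatch pattern, under the assumption of a hypothetical my_req_ctx/my_do_fallback (only the <crypto/internal/skcipher.h> helpers are real API):

#include <crypto/internal/skcipher.h>

/* Hypothetical per-request context; fallback_req must stay the last member. */
struct my_req_ctx {
	bool decrypt;
	struct skcipher_request fallback_req;
};

static int my_do_fallback(struct skcipher_request *areq,
			  struct crypto_skcipher *fallback_tfm)
{
	struct my_req_ctx *rctx = skcipher_request_ctx(areq);

	skcipher_request_set_tfm(&rctx->fallback_req, fallback_tfm);
	/* Forward the caller's flags and completion so the fallback can
	 * finish the original request even if it completes asynchronously. */
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);

	/* May return 0, an error, or -EINPROGRESS for async completion. */
	return rctx->decrypt ? crypto_skcipher_decrypt(&rctx->fallback_req)
			     : crypto_skcipher_encrypt(&rctx->fallback_req);
}

Because the completion is forwarded, there is no need to zero or wait on a sub-request here; whatever the fallback returns is handed straight back to the original caller.
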
@@ -334,18 +333,20 @@ int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
op->ss = algt->ss;
- sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx);
-
- op->fallback_tfm = crypto_alloc_sync_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
+ op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
if (IS_ERR(op->fallback_tfm)) {
dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
name, PTR_ERR(op->fallback_tfm));
return PTR_ERR(op->fallback_tfm);
}
+ sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx) +
+ crypto_skcipher_reqsize(op->fallback_tfm);
+
+
dev_info(op->ss->dev, "Fallback for %s is %s\n",
crypto_tfm_alg_driver_name(&sktfm->base),
- crypto_tfm_alg_driver_name(crypto_skcipher_tfm(&op->fallback_tfm->base)));
+ crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)));
op->enginectx.op.do_one_request = sun8i_ss_handle_cipher_request;
op->enginectx.op.prepare_request = NULL;
@@ -359,7 +360,7 @@ int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
return 0;
error_pm:
- crypto_free_sync_skcipher(op->fallback_tfm);
+ crypto_free_skcipher(op->fallback_tfm);
return err;
}
@@ -371,7 +372,7 @@ void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
memzero_explicit(op->key, op->keylen);
kfree(op->key);
}
- crypto_free_sync_skcipher(op->fallback_tfm);
+ crypto_free_skcipher(op->fallback_tfm);
pm_runtime_put_sync(op->ss->dev);
}
@@ -401,10 +402,10 @@ int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
if (!op->key)
return -ENOMEM;
- crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
- crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);
+ crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
+ crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);
- return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
+ return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
@@ -427,8 +428,8 @@ int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
if (!op->key)
return -ENOMEM;
- crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
- crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);
+ crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
+ crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);
- return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
+ return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
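
The setkey hunks follow the usual pattern for keying a fallback transform: mirror the request flags of the wrapping tfm onto the fallback, then hand it the same key. A sketch with a placeholder my_tfm_ctx; crypto_skcipher_get_flags() reads the same crt_flags the driver accesses directly:

#include <crypto/internal/skcipher.h>

struct my_tfm_ctx {
	struct crypto_skcipher *fallback_tfm;
};

static int my_setkey(struct crypto_skcipher *tfm, const u8 *key,
		     unsigned int keylen)
{
	struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* Propagate request flags such as CRYPTO_TFM_REQ_FORBID_WEAK_KEYS. */
	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

The remaining hunks adjust the driver's header: the request-context kernel-doc and struct gain the embedded fallback_req, and the tfm context switches from crypto_sync_skcipher to crypto_skcipher.
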
@@ -135,17 +135,18 @@ struct sun8i_ss_dev {
/*
* struct sun8i_cipher_req_ctx - context for a skcipher request
- * @t_src: list of mapped SGs with their size
- * @t_dst: list of mapped SGs with their size
- * @p_key: DMA address of the key
- * @p_iv: DMA address of the IV
- * @method: current algorithm for this request
- * @op_mode: op_mode for this request
- * @op_dir: direction (encrypt vs decrypt) for this request
- * @flow: the flow to use for this request
- * @ivlen: size of biv
- * @keylen: keylen for this request
- * @biv: buffer which contain the IV
+ * @t_src: list of mapped SGs with their size
+ * @t_dst: list of mapped SGs with their size
+ * @p_key: DMA address of the key
+ * @p_iv: DMA address of the IV
+ * @method: current algorithm for this request
+ * @op_mode: op_mode for this request
+ * @op_dir: direction (encrypt vs decrypt) for this request
+ * @flow: the flow to use for this request
+ * @ivlen: size of biv
+ * @keylen: keylen for this request
+ * @biv: buffer which contain the IV
+ * @fallback_req: request struct for invoking the fallback skcipher TFM
*/
struct sun8i_cipher_req_ctx {
struct sginfo t_src[MAX_SG];
@@ -159,6 +160,7 @@ struct sun8i_cipher_req_ctx {
unsigned int ivlen;
unsigned int keylen;
void *biv;
+ struct skcipher_request fallback_req; // keep at the end
};
/*
@@ -174,7 +176,7 @@ struct sun8i_cipher_tfm_ctx {
u32 *key;
u32 keylen;
struct sun8i_ss_dev *ss;
- struct crypto_sync_skcipher *fallback_tfm;
+ struct crypto_skcipher *fallback_tfm;
};
/*
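
A note on the "keep at the end" comment and the reqsize change in sun8i_ss_cipher_init(): callers size a request as sizeof(struct skcipher_request) plus crypto_skcipher_reqsize(tfm) (see skcipher_request_alloc()), so the driver's context sits directly behind the request. With reqsize set to sizeof(struct sun8i_cipher_req_ctx) + crypto_skcipher_reqsize(op->fallback_tfm), the extra bytes end up immediately after fallback_req and serve as the fallback's own request context, which only works if fallback_req is the last member. Sketched below, reusing the my_* placeholders from the earlier sketches; crypto_skcipher_set_reqsize() is the helper from <crypto/internal/skcipher.h>, while the driver assigns reqsize directly to the same effect:

#include <crypto/internal/skcipher.h>
#include <linux/err.h>

/* Hypothetical init: the extra crypto_skcipher_reqsize(fallback) bytes
 * requested here become the fallback's request context, expected to sit
 * right after its struct skcipher_request (hence "keep at the end"). */
static int my_init(struct crypto_skcipher *tfm)
{
	struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->fallback_tfm = crypto_alloc_skcipher(crypto_tfm_alg_name(crypto_skcipher_tfm(tfm)),
						  0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm))
		return PTR_ERR(ctx->fallback_tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct my_req_ctx) +
					 crypto_skcipher_reqsize(ctx->fallback_tfm));
	return 0;
}
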