@@ -664,11 +664,11 @@ static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
len = 6;
}
written = min_t(size_t, AES_BLOCK_SIZE - len, alen);
- scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
+ memcpy_from_scatterwalk((char *)block + len, &cryp->in_walk, written);
writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
cryp->header_in -= written;
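For reference: the trailing 0 in the old call selected the scatterlist-to-buffer direction, and memcpy_from_scatterwalk() encodes that same direction in its name while still advancing the walk as it copies. A minimal sketch of the pattern above, assuming the current <crypto/scatterwalk.h> helpers; fill_aad_block() is a hypothetical name, not a function in this driver:

#include <linux/minmax.h>
#include <linux/types.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>

/* Gather up to one AES block of AAD into the zero-padded @block,
 * starting at offset @len, and advance the walk past what was read. */
static size_t fill_aad_block(struct scatter_walk *walk, u8 *block,
			     size_t len, size_t alen)
{
	size_t written = min_t(size_t, AES_BLOCK_SIZE - len, alen);

	memcpy_from_scatterwalk(block + len, walk, written);
	return written;
}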
@@ -991,11 +991,11 @@ static int stm32_cryp_header_dma_start(struct stm32_cryp *cryp)
tx_in->callback_param = cryp;
tx_in->callback = stm32_cryp_header_dma_callback;
/* Advance scatterwalk to not DMA'ed data */
align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
- scatterwalk_copychunks(NULL, &cryp->in_walk, align_size, 2);
+ scatterwalk_skip(&cryp->in_walk, align_size);
cryp->header_in -= align_size;
ret = dma_submit_error(dmaengine_submit(tx_in));
if (ret < 0) {
dev_err(cryp->dev, "DMA in submit failed\n");
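The old out == 2 mode copied nothing and only advanced the walk, which is why the buffer argument was NULL; scatterwalk_skip() states that intent directly. A sketch of the surrounding logic, with advance_past_dma() as a made-up helper name:

#include <linux/align.h>
#include <crypto/scatterwalk.h>

/* After queueing the block-aligned prefix of @total bytes to the DMA
 * engine, move the CPU-side walk past it so PIO resumes at the tail. */
static size_t advance_past_dma(struct scatter_walk *walk, size_t total,
			       size_t blocksize)
{
	size_t align_size = ALIGN_DOWN(total, blocksize);

	scatterwalk_skip(walk, align_size);
	return total - align_size;	/* bytes left for PIO */
}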
@@ -1054,22 +1054,22 @@ static int stm32_cryp_dma_start(struct stm32_cryp *cryp)
tx_out->callback = stm32_cryp_dma_callback;
tx_out->callback_param = cryp;
/* Advance scatterwalk to not DMA'ed data */
align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
- scatterwalk_copychunks(NULL, &cryp->in_walk, align_size, 2);
+ scatterwalk_skip(&cryp->in_walk, align_size);
cryp->payload_in -= align_size;
ret = dma_submit_error(dmaengine_submit(tx_in));
if (ret < 0) {
dev_err(cryp->dev, "DMA in submit failed\n");
return ret;
}
dma_async_issue_pending(cryp->dma_lch_in);
/* Advance scatterwalk to not DMA'ed data */
- scatterwalk_copychunks(NULL, &cryp->out_walk, align_size, 2);
+ scatterwalk_skip(&cryp->out_walk, align_size);
cryp->payload_out -= align_size;
ret = dma_submit_error(dmaengine_submit(tx_out));
if (ret < 0) {
dev_err(cryp->dev, "DMA out submit failed\n");
return ret;
@@ -1735,13 +1735,13 @@ static int stm32_cryp_prepare_req(struct skcipher_request *req,
in_sg = areq->src;
out_sg = areq->dst;
scatterwalk_start(&cryp->in_walk, in_sg);
- scatterwalk_start(&cryp->out_walk, out_sg);
/* In output, jump after assoc data */
- scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
+ scatterwalk_start_at_pos(&cryp->out_walk, out_sg,
+ areq->assoclen);
ret = stm32_cryp_hw_init(cryp);
if (ret)
return ret;
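scatterwalk_start_at_pos() folds the start-then-skip pair into a single call, so out_walk begins areq->assoclen bytes into the destination and ciphertext is written after the associated data. The equivalence, as fragments:

/* Before: start at the head, then advance past the assoc data. */
scatterwalk_start(&cryp->out_walk, out_sg);
scatterwalk_copychunks(NULL, &cryp->out_walk, areq->assoclen, 2);

/* After: one call, same resulting position. */
scatterwalk_start_at_pos(&cryp->out_walk, out_sg, areq->assoclen);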
@@ -1871,16 +1871,16 @@ static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
if (is_encrypt(cryp)) {
u32 out_tag[AES_BLOCK_32];
/* Get and write tag */
readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
- scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
+ memcpy_to_scatterwalk(&cryp->out_walk, out_tag, cryp->authsize);
} else {
/* Get and check tag */
u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];
- scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
+ memcpy_from_scatterwalk(in_tag, &cryp->in_walk, cryp->authsize);
readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
if (crypto_memneq(in_tag, out_tag, cryp->authsize))
ret = -EBADMSG;
}
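One easy-to-miss detail in the out == 1 conversions: the argument order flips. The old helper always took the kernel buffer first regardless of direction, while memcpy_to_scatterwalk() puts the walk (the destination) first, mirroring memcpy(dst, src, n). Side by side for the encrypt-path tag write:

/* Old: buffer first, direction in the trailing flag. */
scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);

/* New: destination first, as with memcpy(dst, src, n). */
memcpy_to_scatterwalk(&cryp->out_walk, out_tag, cryp->authsize);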
@@ -1921,22 +1921,22 @@ static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
u32 block[AES_BLOCK_32];
readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
- scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
- cryp->payload_out), 1);
+ memcpy_to_scatterwalk(&cryp->out_walk, block, min_t(size_t, cryp->hw_blocksize,
+ cryp->payload_out));
cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
cryp->payload_out);
}
static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
u32 block[AES_BLOCK_32] = {0};
- scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
- cryp->payload_in), 0);
+ memcpy_from_scatterwalk(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
+ cryp->payload_in));
writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32));
cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
}
static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
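In both IRQ helpers the min_t() clamp is computed twice: the FIFO always moves full hw_blocksize blocks, but only the bytes still owed to the request may touch the scatterlist. Hoisting the clamp into a local would make that invariant easier to read; an equivalent sketch for the read side (a possible follow-up, not part of this patch):

static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32];
	size_t n = min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

	/* Drain one full hardware block from the FIFO... */
	readsl(cryp->regs + cryp->caps->dout, block,
	       cryp->hw_blocksize / sizeof(u32));
	/* ...but copy out only the bytes the request still expects. */
	memcpy_to_scatterwalk(&cryp->out_walk, block, n);
	cryp->payload_out -= n;
}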
@@ -1979,12 +1979,12 @@ static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
* Same code as stm32_cryp_irq_read_data(), but we want to store
* block value
*/
readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
- scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
- cryp->payload_out), 1);
+ memcpy_to_scatterwalk(&cryp->out_walk, block, min_t(size_t, cryp->hw_blocksize,
+ cryp->payload_out));
cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
cryp->payload_out);
/* d) change mode back to AES GCM */
cfg &= ~CR_ALGO_MASK;
@@ -2077,12 +2077,12 @@ static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
* Same code as stm32_cryp_irq_read_data(), but we want to store
* block value
*/
readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
- scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
- cryp->payload_out), 1);
+ memcpy_to_scatterwalk(&cryp->out_walk, block, min_t(size_t, cryp->hw_blocksize,
+ cryp->payload_out));
cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);
/* d) Load again CRYP_CSGCMCCMxR */
for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
@@ -2159,11 +2159,11 @@ static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
u32 block[AES_BLOCK_32] = {0};
size_t written;
written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);
- scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
+ memcpy_from_scatterwalk(block, &cryp->in_walk, written);
writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
cryp->header_in -= written;
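Taken together, every hunk here is an instance of one of four mechanical rewrites, which makes the remaining call sites easy to audit (helper names as in the current <crypto/scatterwalk.h>):

/* sg -> buffer  */ scatterwalk_copychunks(buf, walk, n, 0)   ==>  memcpy_from_scatterwalk(buf, walk, n)
/* buffer -> sg  */ scatterwalk_copychunks(buf, walk, n, 1)   ==>  memcpy_to_scatterwalk(walk, buf, n)
/* advance only  */ scatterwalk_copychunks(NULL, walk, n, 2)  ==>  scatterwalk_skip(walk, n)
/* start + skip  */ scatterwalk_start() then skip by n        ==>  scatterwalk_start_at_pos(walk, sg, n)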