@@ -121,6 +121,8 @@ struct atmel_aes_ctr_ctx {
 	size_t			offset;
 	struct scatterlist	src[2];
 	struct scatterlist	dst[2];
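+	/* Block count of the current transfer, used to advance the IV. */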
+	u16			blocks;
 };
 
 struct atmel_aes_gcm_ctx {
@@ -513,6 +515,32 @@ static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
 	}
 }
 
+static inline struct atmel_aes_ctr_ctx *
+atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
+{
+	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
+}
+
+static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd)
+{
+	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
+	int i;
+
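+	/*
+	 * For CTR the request must finish with the IV set to the counter
+	 * of the block right after the processed data, not to the last
+	 * ciphertext block as in CBC, so step the big-endian counter in
+	 * ctx->iv once per processed block before handing it back.
+	 */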
+	for (i = 0; i < ctx->blocks; i++)
+		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
+
+	memcpy(req->iv, ctx->iv, ivsize);
+}
+
 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
 {
 	struct skcipher_request *req = skcipher_request_cast(dd->areq);
@@ -527,8 +555,13 @@ static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
 	dd->flags &= ~AES_FLAGS_BUSY;
 
 	if (!dd->ctx->is_aead &&
-	    (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB)
-		atmel_aes_set_iv_as_last_ciphertext_block(dd);
+	    (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB) {
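+		/* CTR hands back the next counter, not the last ciphertext block. */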
+		if ((rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_CTR)
+			atmel_aes_set_iv_as_last_ciphertext_block(dd);
+		else
+			atmel_aes_ctr_update_req_iv(dd);
+	}
 
 	if (dd->is_async)
 		dd->areq->complete(dd->areq, err);
@@ -1007,12 +1040,6 @@ static int atmel_aes_start(struct atmel_aes_dev *dd)
 				   atmel_aes_transfer_complete);
 }
 
-static inline struct atmel_aes_ctr_ctx *
-atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
-{
-	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
-}
-
 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 {
 	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
@@ -1020,7 +1047,7 @@ static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 	struct scatterlist *src, *dst;
 	size_t datalen;
 	u32 ctr;
-	u16 blocks, start, end;
+	u16 start, end;
 	bool use_dma, fragmented = false;
 
 	/* Check for transfer completion. */
@@ -1030,14 +1057,15 @@ static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 
 	/* Compute data length. */
 	datalen = req->cryptlen - ctx->offset;
-	blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
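+	/* Save the block count to advance the IV when the transfer ends. */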
+	ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
 	ctr = be32_to_cpu(ctx->iv[3]);
 
 	/* Check 16bit counter overflow. */
 	start = ctr & 0xffff;
-	end = start + blocks - 1;
+	end = start + ctx->blocks - 1;
 
-	if (blocks >> 16 || end < start) {
+	if (ctx->blocks >> 16 || end < start) {
 		ctr |= 0xffff;
 		datalen = AES_BLOCK_SIZE * (0x10000 - start);
 		fragmented = true;