@@ -305,6 +305,9 @@ config CRYPTO_DEV_OMAP_AES
select CRYPTO_AES
select CRYPTO_BLKCIPHER
select CRYPTO_ENGINE
+ select CRYPTO_CBC
+ select CRYPTO_ECB
+ select CRYPTO_CTR
help
OMAP processors have AES module accelerator. Select this if you
want to use the OMAP module for AES algorithms.
@@ -103,6 +103,7 @@ struct omap_aes_ctx {
int keylen;
u32 key[AES_KEYSIZE_256 / sizeof(u32)];
unsigned long flags;
+ struct crypto_ablkcipher *fallback;
};
struct omap_aes_reqctx {
@@ -680,15 +681,32 @@ static void omap_aes_done_task(unsigned long data)
static int omap_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
{
+ struct crypto_tfm *tfm =
+ crypto_ablkcipher_tfm(crypto_ablkcipher_reqtfm(req));
struct omap_aes_ctx *ctx = crypto_ablkcipher_ctx(
crypto_ablkcipher_reqtfm(req));
struct omap_aes_reqctx *rctx = ablkcipher_request_ctx(req);
struct omap_aes_dev *dd;
+ int ret;
pr_debug("nbytes: %d, enc: %d, cbc: %d\n", req->nbytes,
!!(mode & FLAGS_ENCRYPT),
!!(mode & FLAGS_CBC));
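+	/*
+	 * Below this size the cost of setting up the accelerator/DMA
+	 * outweighs the benefit, so use the software fallback instead.
+	 */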
+ if (req->nbytes < 200) {
+ ablkcipher_request_set_tfm(req, ctx->fallback);
+
+ if (mode & FLAGS_ENCRYPT)
+ ret = crypto_ablkcipher_encrypt(req);
+ else
+ ret = crypto_ablkcipher_decrypt(req);
+ ablkcipher_request_set_tfm(req, __crypto_ablkcipher_cast(tfm));
+ return ret;
+ }
dd = omap_aes_find_dev(ctx);
if (!dd)
return -ENODEV;
@@ -704,6 +722,7 @@ static int omap_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
unsigned int keylen)
{
struct omap_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+ int ret;
if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
keylen != AES_KEYSIZE_256)
@@ -714,6 +733,18 @@ static int omap_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
memcpy(ctx->key, key, keylen);
ctx->keylen = keylen;
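+	/*
+	 * Keep the fallback tfm in sync: mirror the request flags and
+	 * program it with the same key.
+	 */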
+ ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+ ctx->fallback->base.crt_flags |=
+ tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK;
+
+ ret = crypto_ablkcipher_setkey(ctx->fallback, key, keylen);
+	if (ret)
+		return ret;
+
return 0;
}
@@ -751,6 +782,11 @@ static int omap_aes_cra_init(struct crypto_tfm *tfm)
{
struct omap_aes_dev *dd = NULL;
int err;
+ const char *name = crypto_tfm_alg_name(tfm);
+ const u32 flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK;
+ struct omap_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_ablkcipher *blk;
+
list_for_each_entry(dd, &dev_list, list) {
err = pm_runtime_get_sync(dd->dev);
@@ -761,6 +797,16 @@ static int omap_aes_cra_init(struct crypto_tfm *tfm)
}
}
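+	/*
+	 * Allocate a fallback ablkcipher of the same algorithm for
+	 * requests that are not handled by the accelerator.
+	 */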
+ blk = crypto_alloc_ablkcipher(name, 0, flags);
+ if (IS_ERR(blk))
+ return PTR_ERR(blk);
+
+ ctx->fallback = blk;
+
tfm->crt_ablkcipher.reqsize = sizeof(struct omap_aes_reqctx);
return 0;
@@ -769,11 +815,16 @@ static int omap_aes_cra_init(struct crypto_tfm *tfm)
static void omap_aes_cra_exit(struct crypto_tfm *tfm)
{
struct omap_aes_dev *dd = NULL;
+ struct omap_aes_ctx *ctx = crypto_tfm_ctx(tfm);
list_for_each_entry(dd, &dev_list, list) {
pm_runtime_put_sync(dd->dev);
}
+ if (ctx->fallback)
+ crypto_free_ablkcipher(ctx->fallback);
+
+ ctx->fallback = NULL;
}
/* ********************** ALGS ************************************ */
@@ -785,7 +836,7 @@ static struct crypto_alg algs_ecb_cbc[] = {
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC,
+ CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct omap_aes_ctx),
.cra_alignmask = 0,
@@ -807,7 +858,7 @@ static struct crypto_alg algs_ecb_cbc[] = {
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC,
+ CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct omap_aes_ctx),
.cra_alignmask = 0,
@@ -833,7 +884,7 @@ static struct crypto_alg algs_ctr[] = {
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC,
+ CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct omap_aes_ctx),
.cra_alignmask = 0,