Message ID | 20210420033602.1729947-7-thara.gopinath@linaro.org (mailing list archive) |
---|---|
State | Superseded |
Headers | show |
Series | Add support for AEAD algorithms in Qualcomm Crypto Engine driver | expand |
Hi Thara, Thank you for the patch! Perhaps something to improve: [auto build test WARNING on cryptodev/master] [also build test WARNING on next-20210421] [cannot apply to crypto/master v5.12-rc8] [If your patch is applied to the wrong git tree, kindly drop us a note. And when submitting patch, we suggest to use '--base' as documented in https://git-scm.com/docs/git-format-patch] url: https://github.com/0day-ci/linux/commits/Thara-Gopinath/Add-support-for-AEAD-algorithms-in-Qualcomm-Crypto-Engine-driver/20210420-113944 base: https://git.kernel.org/pub/scm/linux/kernel/git/herbert/cryptodev-2.6.git master config: x86_64-randconfig-a013-20210421 (attached as .config) compiler: clang version 13.0.0 (https://github.com/llvm/llvm-project d87b9b81ccb95217181ce75515c6c68bbb408ca4) reproduce (this is a W=1 build): wget https://raw.githubusercontent.com/intel/lkp-tests/master/sbin/make.cross -O ~/bin/make.cross chmod +x ~/bin/make.cross # install x86_64 cross compiling tool for clang build # apt-get install binutils-x86-64-linux-gnu # https://github.com/0day-ci/linux/commit/b152c1b17bb6ad7923f0f3f8bc5ef81fb4cd054a git remote add linux-review https://github.com/0day-ci/linux git fetch --no-tags linux-review Thara-Gopinath/Add-support-for-AEAD-algorithms-in-Qualcomm-Crypto-Engine-driver/20210420-113944 git checkout b152c1b17bb6ad7923f0f3f8bc5ef81fb4cd054a # save the attached .config to linux build tree COMPILER_INSTALL_PATH=$HOME/0day COMPILER=clang make.cross W=1 ARCH=x86_64 If you fix the issue, kindly add following tag as appropriate Reported-by: kernel test robot <lkp@intel.com> All warnings (new ones prefixed by >>): >> drivers/crypto/qce/common.c:478:14: warning: variable 'auth_ivsize' is used uninitialized whenever 'if' condition is false [-Wsometimes-uninitialized] } else if (IS_SHA256_HMAC(rctx->flags)) { ^~~~~~~~~~~~~~~~~~~~~~~~~~~ drivers/crypto/qce/common.h:68:32: note: expanded from macro 'IS_SHA256_HMAC' #define IS_SHA256_HMAC(flags) (flags & 
QCE_HASH_SHA256_HMAC) ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ drivers/crypto/qce/common.c:482:18: note: uninitialized use occurs here authiv_words = auth_ivsize / sizeof(u32); ^~~~~~~~~~~ drivers/crypto/qce/common.c:478:10: note: remove the 'if' if its condition is always true } else if (IS_SHA256_HMAC(rctx->flags)) { ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ drivers/crypto/qce/common.c:434:26: note: initialize the variable 'auth_ivsize' to silence this warning unsigned int auth_ivsize; ^ = 0 1 warning generated. vim +478 drivers/crypto/qce/common.c 418 419 static int qce_setup_regs_aead(struct crypto_async_request *async_req) 420 { 421 struct aead_request *req = aead_request_cast(async_req); 422 struct qce_aead_reqctx *rctx = aead_request_ctx(req); 423 struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm); 424 struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req)); 425 struct qce_device *qce = tmpl->qce; 426 u32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0}; 427 u32 enciv[QCE_MAX_IV_SIZE / sizeof(u32)] = {0}; 428 u32 authkey[QCE_SHA_HMAC_KEY_SIZE / sizeof(u32)] = {0}; 429 u32 authiv[SHA256_DIGEST_SIZE / sizeof(u32)] = {0}; 430 u32 authnonce[QCE_MAX_NONCE / sizeof(u32)] = {0}; 431 unsigned int enc_keylen = ctx->enc_keylen; 432 unsigned int auth_keylen = ctx->auth_keylen; 433 unsigned int enc_ivsize = rctx->ivsize; 434 unsigned int auth_ivsize; 435 unsigned int enckey_words, enciv_words; 436 unsigned int authkey_words, authiv_words, authnonce_words; 437 unsigned long flags = rctx->flags; 438 u32 encr_cfg, auth_cfg, config, totallen; 439 u32 iv_last_word; 440 441 qce_setup_config(qce); 442 443 /* Write encryption key */ 444 enckey_words = qce_be32_to_cpu_array(enckey, ctx->enc_key, enc_keylen); 445 qce_write_array(qce, REG_ENCR_KEY0, enckey, enckey_words); 446 447 /* Write encryption iv */ 448 enciv_words = qce_be32_to_cpu_array(enciv, rctx->iv, enc_ivsize); 449 qce_write_array(qce, REG_CNTR0_IV0, enciv, enciv_words); 450 451 if (IS_CCM(rctx->flags)) { 452 
iv_last_word = enciv[enciv_words - 1]; 453 qce_write(qce, REG_CNTR3_IV3, iv_last_word + 1); 454 qce_write_array(qce, REG_ENCR_CCM_INT_CNTR0, (u32 *)enciv, enciv_words); 455 qce_write(qce, REG_CNTR_MASK, ~0); 456 qce_write(qce, REG_CNTR_MASK0, ~0); 457 qce_write(qce, REG_CNTR_MASK1, ~0); 458 qce_write(qce, REG_CNTR_MASK2, ~0); 459 } 460 461 /* Clear authentication IV and KEY registers of previous values */ 462 qce_clear_array(qce, REG_AUTH_IV0, 16); 463 qce_clear_array(qce, REG_AUTH_KEY0, 16); 464 465 /* Clear byte count */ 466 qce_clear_array(qce, REG_AUTH_BYTECNT0, 4); 467 468 /* Write authentication key */ 469 authkey_words = qce_be32_to_cpu_array(authkey, ctx->auth_key, auth_keylen); 470 qce_write_array(qce, REG_AUTH_KEY0, (u32 *)authkey, authkey_words); 471 472 /* Write initial authentication IV only for HMAC algorithms */ 473 if (IS_SHA_HMAC(rctx->flags)) { 474 /* Write default authentication iv */ 475 if (IS_SHA1_HMAC(rctx->flags)) { 476 auth_ivsize = SHA1_DIGEST_SIZE; 477 memcpy(authiv, std_iv_sha1, auth_ivsize); > 478 } else if (IS_SHA256_HMAC(rctx->flags)) { 479 auth_ivsize = SHA256_DIGEST_SIZE; 480 memcpy(authiv, std_iv_sha256, auth_ivsize); 481 } 482 authiv_words = auth_ivsize / sizeof(u32); 483 qce_write_array(qce, REG_AUTH_IV0, (u32 *)authiv, authiv_words); 484 } else if (IS_CCM(rctx->flags)) { 485 /* Write nonce for CCM algorithms */ 486 authnonce_words = qce_be32_to_cpu_array(authnonce, rctx->ccm_nonce, QCE_MAX_NONCE); 487 qce_write_array(qce, REG_AUTH_INFO_NONCE0, authnonce, authnonce_words); 488 } 489 490 /* Set up ENCR_SEG_CFG */ 491 encr_cfg = qce_encr_cfg(flags, enc_keylen); 492 if (IS_ENCRYPT(flags)) 493 encr_cfg |= BIT(ENCODE_SHIFT); 494 qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg); 495 496 /* Set up AUTH_SEG_CFG */ 497 auth_cfg = qce_auth_cfg(rctx->flags, auth_keylen, ctx->authsize); 498 auth_cfg |= BIT(AUTH_LAST_SHIFT); 499 auth_cfg |= BIT(AUTH_FIRST_SHIFT); 500 if (IS_ENCRYPT(flags)) { 501 if (IS_CCM(rctx->flags)) 502 auth_cfg |= 
AUTH_POS_BEFORE << AUTH_POS_SHIFT; 503 else 504 auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT; 505 } else { 506 if (IS_CCM(rctx->flags)) 507 auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT; 508 else 509 auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT; 510 } 511 qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg); 512 513 totallen = rctx->cryptlen + rctx->assoclen; 514 515 /* Set the encryption size and start offset */ 516 if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags)) 517 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize); 518 else 519 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen); 520 qce_write(qce, REG_ENCR_SEG_START, rctx->assoclen & 0xffff); 521 522 /* Set the authentication size and start offset */ 523 qce_write(qce, REG_AUTH_SEG_SIZE, totallen); 524 qce_write(qce, REG_AUTH_SEG_START, 0); 525 526 /* Write total length */ 527 if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags)) 528 qce_write(qce, REG_SEG_SIZE, totallen + ctx->authsize); 529 else 530 qce_write(qce, REG_SEG_SIZE, totallen); 531 532 /* get little endianness */ 533 config = qce_config_reg(qce, 1); 534 qce_write(qce, REG_CONFIG, config); 535 536 /* Start the process */ 537 qce_crypto_go(qce, !IS_CCM(flags)); 538 539 return 0; 540 } 541 #endif 542 --- 0-DAY CI Kernel Test Service, Intel Corporation https://lists.01.org/hyperkitty/list/kbuild-all@lists.01.org
Hi Thara, url: https://github.com/0day-ci/linux/commits/Thara-Gopinath/Add-support-for-AEAD-algorithms-in-Qualcomm-Crypto-Engine-driver/20210420-113944 base: https://git.kernel.org/pub/scm/linux/kernel/git/herbert/cryptodev-2.6.git master config: arm-randconfig-m031-20210428 (attached as .config) compiler: arm-linux-gnueabi-gcc (GCC) 9.3.0 If you fix the issue, kindly add following tag as appropriate Reported-by: kernel test robot <lkp@intel.com> Reported-by: Dan Carpenter <dan.carpenter@oracle.com> smatch warnings: drivers/crypto/qce/common.c:482 qce_setup_regs_aead() error: uninitialized symbol 'auth_ivsize'. vim +/auth_ivsize +482 drivers/crypto/qce/common.c b152c1b17bb6ad Thara Gopinath 2021-04-19 419 static int qce_setup_regs_aead(struct crypto_async_request *async_req) b152c1b17bb6ad Thara Gopinath 2021-04-19 420 { b152c1b17bb6ad Thara Gopinath 2021-04-19 421 struct aead_request *req = aead_request_cast(async_req); b152c1b17bb6ad Thara Gopinath 2021-04-19 422 struct qce_aead_reqctx *rctx = aead_request_ctx(req); b152c1b17bb6ad Thara Gopinath 2021-04-19 423 struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm); b152c1b17bb6ad Thara Gopinath 2021-04-19 424 struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req)); b152c1b17bb6ad Thara Gopinath 2021-04-19 425 struct qce_device *qce = tmpl->qce; b152c1b17bb6ad Thara Gopinath 2021-04-19 426 u32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0}; b152c1b17bb6ad Thara Gopinath 2021-04-19 427 u32 enciv[QCE_MAX_IV_SIZE / sizeof(u32)] = {0}; b152c1b17bb6ad Thara Gopinath 2021-04-19 428 u32 authkey[QCE_SHA_HMAC_KEY_SIZE / sizeof(u32)] = {0}; b152c1b17bb6ad Thara Gopinath 2021-04-19 429 u32 authiv[SHA256_DIGEST_SIZE / sizeof(u32)] = {0}; b152c1b17bb6ad Thara Gopinath 2021-04-19 430 u32 authnonce[QCE_MAX_NONCE / sizeof(u32)] = {0}; b152c1b17bb6ad Thara Gopinath 2021-04-19 431 unsigned int enc_keylen = ctx->enc_keylen; b152c1b17bb6ad Thara Gopinath 2021-04-19 432 unsigned int auth_keylen = 
ctx->auth_keylen; b152c1b17bb6ad Thara Gopinath 2021-04-19 433 unsigned int enc_ivsize = rctx->ivsize; b152c1b17bb6ad Thara Gopinath 2021-04-19 434 unsigned int auth_ivsize; b152c1b17bb6ad Thara Gopinath 2021-04-19 435 unsigned int enckey_words, enciv_words; b152c1b17bb6ad Thara Gopinath 2021-04-19 436 unsigned int authkey_words, authiv_words, authnonce_words; b152c1b17bb6ad Thara Gopinath 2021-04-19 437 unsigned long flags = rctx->flags; b152c1b17bb6ad Thara Gopinath 2021-04-19 438 u32 encr_cfg, auth_cfg, config, totallen; b152c1b17bb6ad Thara Gopinath 2021-04-19 439 u32 iv_last_word; b152c1b17bb6ad Thara Gopinath 2021-04-19 440 b152c1b17bb6ad Thara Gopinath 2021-04-19 441 qce_setup_config(qce); b152c1b17bb6ad Thara Gopinath 2021-04-19 442 b152c1b17bb6ad Thara Gopinath 2021-04-19 443 /* Write encryption key */ b152c1b17bb6ad Thara Gopinath 2021-04-19 444 enckey_words = qce_be32_to_cpu_array(enckey, ctx->enc_key, enc_keylen); b152c1b17bb6ad Thara Gopinath 2021-04-19 445 qce_write_array(qce, REG_ENCR_KEY0, enckey, enckey_words); b152c1b17bb6ad Thara Gopinath 2021-04-19 446 b152c1b17bb6ad Thara Gopinath 2021-04-19 447 /* Write encryption iv */ b152c1b17bb6ad Thara Gopinath 2021-04-19 448 enciv_words = qce_be32_to_cpu_array(enciv, rctx->iv, enc_ivsize); b152c1b17bb6ad Thara Gopinath 2021-04-19 449 qce_write_array(qce, REG_CNTR0_IV0, enciv, enciv_words); b152c1b17bb6ad Thara Gopinath 2021-04-19 450 b152c1b17bb6ad Thara Gopinath 2021-04-19 451 if (IS_CCM(rctx->flags)) { b152c1b17bb6ad Thara Gopinath 2021-04-19 452 iv_last_word = enciv[enciv_words - 1]; b152c1b17bb6ad Thara Gopinath 2021-04-19 453 qce_write(qce, REG_CNTR3_IV3, iv_last_word + 1); b152c1b17bb6ad Thara Gopinath 2021-04-19 454 qce_write_array(qce, REG_ENCR_CCM_INT_CNTR0, (u32 *)enciv, enciv_words); b152c1b17bb6ad Thara Gopinath 2021-04-19 455 qce_write(qce, REG_CNTR_MASK, ~0); b152c1b17bb6ad Thara Gopinath 2021-04-19 456 qce_write(qce, REG_CNTR_MASK0, ~0); b152c1b17bb6ad Thara Gopinath 2021-04-19 457 
qce_write(qce, REG_CNTR_MASK1, ~0); b152c1b17bb6ad Thara Gopinath 2021-04-19 458 qce_write(qce, REG_CNTR_MASK2, ~0); b152c1b17bb6ad Thara Gopinath 2021-04-19 459 } b152c1b17bb6ad Thara Gopinath 2021-04-19 460 b152c1b17bb6ad Thara Gopinath 2021-04-19 461 /* Clear authentication IV and KEY registers of previous values */ b152c1b17bb6ad Thara Gopinath 2021-04-19 462 qce_clear_array(qce, REG_AUTH_IV0, 16); b152c1b17bb6ad Thara Gopinath 2021-04-19 463 qce_clear_array(qce, REG_AUTH_KEY0, 16); b152c1b17bb6ad Thara Gopinath 2021-04-19 464 b152c1b17bb6ad Thara Gopinath 2021-04-19 465 /* Clear byte count */ b152c1b17bb6ad Thara Gopinath 2021-04-19 466 qce_clear_array(qce, REG_AUTH_BYTECNT0, 4); b152c1b17bb6ad Thara Gopinath 2021-04-19 467 b152c1b17bb6ad Thara Gopinath 2021-04-19 468 /* Write authentication key */ b152c1b17bb6ad Thara Gopinath 2021-04-19 469 authkey_words = qce_be32_to_cpu_array(authkey, ctx->auth_key, auth_keylen); b152c1b17bb6ad Thara Gopinath 2021-04-19 470 qce_write_array(qce, REG_AUTH_KEY0, (u32 *)authkey, authkey_words); b152c1b17bb6ad Thara Gopinath 2021-04-19 471 b152c1b17bb6ad Thara Gopinath 2021-04-19 472 /* Write initial authentication IV only for HMAC algorithms */ b152c1b17bb6ad Thara Gopinath 2021-04-19 473 if (IS_SHA_HMAC(rctx->flags)) { b152c1b17bb6ad Thara Gopinath 2021-04-19 474 /* Write default authentication iv */ b152c1b17bb6ad Thara Gopinath 2021-04-19 475 if (IS_SHA1_HMAC(rctx->flags)) { b152c1b17bb6ad Thara Gopinath 2021-04-19 476 auth_ivsize = SHA1_DIGEST_SIZE; b152c1b17bb6ad Thara Gopinath 2021-04-19 477 memcpy(authiv, std_iv_sha1, auth_ivsize); b152c1b17bb6ad Thara Gopinath 2021-04-19 478 } else if (IS_SHA256_HMAC(rctx->flags)) { b152c1b17bb6ad Thara Gopinath 2021-04-19 479 auth_ivsize = SHA256_DIGEST_SIZE; b152c1b17bb6ad Thara Gopinath 2021-04-19 480 memcpy(authiv, std_iv_sha256, auth_ivsize); b152c1b17bb6ad Thara Gopinath 2021-04-19 481 } No else path. 
b152c1b17bb6ad Thara Gopinath 2021-04-19 @482 authiv_words = auth_ivsize / sizeof(u32); b152c1b17bb6ad Thara Gopinath 2021-04-19 483 qce_write_array(qce, REG_AUTH_IV0, (u32 *)authiv, authiv_words); b152c1b17bb6ad Thara Gopinath 2021-04-19 484 } else if (IS_CCM(rctx->flags)) { b152c1b17bb6ad Thara Gopinath 2021-04-19 485 /* Write nonce for CCM algorithms */ b152c1b17bb6ad Thara Gopinath 2021-04-19 486 authnonce_words = qce_be32_to_cpu_array(authnonce, rctx->ccm_nonce, QCE_MAX_NONCE); b152c1b17bb6ad Thara Gopinath 2021-04-19 487 qce_write_array(qce, REG_AUTH_INFO_NONCE0, authnonce, authnonce_words); b152c1b17bb6ad Thara Gopinath 2021-04-19 488 } --- 0-DAY CI Kernel Test Service, Intel Corporation https://lists.01.org/hyperkitty/list/kbuild-all@lists.01.org
diff --git a/drivers/crypto/qce/common.c b/drivers/crypto/qce/common.c index 7b3d6caec1b2..6d6b3792323b 100644 --- a/drivers/crypto/qce/common.c +++ b/drivers/crypto/qce/common.c @@ -15,6 +15,7 @@ #include "core.h" #include "regs-v5.h" #include "sha.h" +#include "aead.h" static inline u32 qce_read(struct qce_device *qce, u32 offset) { @@ -96,7 +97,7 @@ static inline void qce_crypto_go(struct qce_device *qce, bool result_dump) qce_write(qce, REG_GOPROC, BIT(GO_SHIFT)); } -#ifdef CONFIG_CRYPTO_DEV_QCE_SHA +#if defined(CONFIG_CRYPTO_DEV_QCE_SHA) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD) static u32 qce_auth_cfg(unsigned long flags, u32 key_size, u32 auth_size) { u32 cfg = 0; @@ -139,7 +140,9 @@ static u32 qce_auth_cfg(unsigned long flags, u32 key_size, u32 auth_size) return cfg; } +#endif +#ifdef CONFIG_CRYPTO_DEV_QCE_SHA static int qce_setup_regs_ahash(struct crypto_async_request *async_req) { struct ahash_request *req = ahash_request_cast(async_req); @@ -225,7 +228,7 @@ static int qce_setup_regs_ahash(struct crypto_async_request *async_req) } #endif -#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER +#if defined(CONFIG_CRYPTO_DEV_QCE_SKCIPHER) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD) static u32 qce_encr_cfg(unsigned long flags, u32 aes_key_size) { u32 cfg = 0; @@ -271,7 +274,9 @@ static u32 qce_encr_cfg(unsigned long flags, u32 aes_key_size) return cfg; } +#endif +#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER static void qce_xts_swapiv(__be32 *dst, const u8 *src, unsigned int ivsize) { u8 swap[QCE_AES_IV_LENGTH]; @@ -386,6 +391,155 @@ static int qce_setup_regs_skcipher(struct crypto_async_request *async_req) } #endif +#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD +static const u32 std_iv_sha1[SHA256_DIGEST_SIZE / sizeof(u32)] = { + SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4, 0, 0, 0 +}; + +static const u32 std_iv_sha256[SHA256_DIGEST_SIZE / sizeof(u32)] = { + SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3, + SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7 +}; + +static unsigned int qce_be32_to_cpu_array(u32 
*dst, const u8 *src, unsigned int len) +{ + u32 *d = dst; + const u8 *s = src; + unsigned int n; + + n = len / sizeof(u32); + for (; n > 0; n--) { + *d = be32_to_cpup((const __be32 *)s); + s += sizeof(u32); + d++; + } + return DIV_ROUND_UP(len, sizeof(u32)); +} + +static int qce_setup_regs_aead(struct crypto_async_request *async_req) +{ + struct aead_request *req = aead_request_cast(async_req); + struct qce_aead_reqctx *rctx = aead_request_ctx(req); + struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm); + struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req)); + struct qce_device *qce = tmpl->qce; + u32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0}; + u32 enciv[QCE_MAX_IV_SIZE / sizeof(u32)] = {0}; + u32 authkey[QCE_SHA_HMAC_KEY_SIZE / sizeof(u32)] = {0}; + u32 authiv[SHA256_DIGEST_SIZE / sizeof(u32)] = {0}; + u32 authnonce[QCE_MAX_NONCE / sizeof(u32)] = {0}; + unsigned int enc_keylen = ctx->enc_keylen; + unsigned int auth_keylen = ctx->auth_keylen; + unsigned int enc_ivsize = rctx->ivsize; + unsigned int auth_ivsize; + unsigned int enckey_words, enciv_words; + unsigned int authkey_words, authiv_words, authnonce_words; + unsigned long flags = rctx->flags; + u32 encr_cfg, auth_cfg, config, totallen; + u32 iv_last_word; + + qce_setup_config(qce); + + /* Write encryption key */ + enckey_words = qce_be32_to_cpu_array(enckey, ctx->enc_key, enc_keylen); + qce_write_array(qce, REG_ENCR_KEY0, enckey, enckey_words); + + /* Write encryption iv */ + enciv_words = qce_be32_to_cpu_array(enciv, rctx->iv, enc_ivsize); + qce_write_array(qce, REG_CNTR0_IV0, enciv, enciv_words); + + if (IS_CCM(rctx->flags)) { + iv_last_word = enciv[enciv_words - 1]; + qce_write(qce, REG_CNTR3_IV3, iv_last_word + 1); + qce_write_array(qce, REG_ENCR_CCM_INT_CNTR0, (u32 *)enciv, enciv_words); + qce_write(qce, REG_CNTR_MASK, ~0); + qce_write(qce, REG_CNTR_MASK0, ~0); + qce_write(qce, REG_CNTR_MASK1, ~0); + qce_write(qce, REG_CNTR_MASK2, ~0); + } + + /* Clear authentication IV 
and KEY registers of previous values */ + qce_clear_array(qce, REG_AUTH_IV0, 16); + qce_clear_array(qce, REG_AUTH_KEY0, 16); + + /* Clear byte count */ + qce_clear_array(qce, REG_AUTH_BYTECNT0, 4); + + /* Write authentication key */ + authkey_words = qce_be32_to_cpu_array(authkey, ctx->auth_key, auth_keylen); + qce_write_array(qce, REG_AUTH_KEY0, (u32 *)authkey, authkey_words); + + /* Write initial authentication IV only for HMAC algorithms */ + if (IS_SHA_HMAC(rctx->flags)) { + /* Write default authentication iv */ + if (IS_SHA1_HMAC(rctx->flags)) { + auth_ivsize = SHA1_DIGEST_SIZE; + memcpy(authiv, std_iv_sha1, auth_ivsize); + } else if (IS_SHA256_HMAC(rctx->flags)) { + auth_ivsize = SHA256_DIGEST_SIZE; + memcpy(authiv, std_iv_sha256, auth_ivsize); + } + authiv_words = auth_ivsize / sizeof(u32); + qce_write_array(qce, REG_AUTH_IV0, (u32 *)authiv, authiv_words); + } else if (IS_CCM(rctx->flags)) { + /* Write nonce for CCM algorithms */ + authnonce_words = qce_be32_to_cpu_array(authnonce, rctx->ccm_nonce, QCE_MAX_NONCE); + qce_write_array(qce, REG_AUTH_INFO_NONCE0, authnonce, authnonce_words); + } + + /* Set up ENCR_SEG_CFG */ + encr_cfg = qce_encr_cfg(flags, enc_keylen); + if (IS_ENCRYPT(flags)) + encr_cfg |= BIT(ENCODE_SHIFT); + qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg); + + /* Set up AUTH_SEG_CFG */ + auth_cfg = qce_auth_cfg(rctx->flags, auth_keylen, ctx->authsize); + auth_cfg |= BIT(AUTH_LAST_SHIFT); + auth_cfg |= BIT(AUTH_FIRST_SHIFT); + if (IS_ENCRYPT(flags)) { + if (IS_CCM(rctx->flags)) + auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT; + else + auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT; + } else { + if (IS_CCM(rctx->flags)) + auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT; + else + auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT; + } + qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg); + + totallen = rctx->cryptlen + rctx->assoclen; + + /* Set the encryption size and start offset */ + if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags)) + qce_write(qce, 
REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize); + else + qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen); + qce_write(qce, REG_ENCR_SEG_START, rctx->assoclen & 0xffff); + + /* Set the authentication size and start offset */ + qce_write(qce, REG_AUTH_SEG_SIZE, totallen); + qce_write(qce, REG_AUTH_SEG_START, 0); + + /* Write total length */ + if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags)) + qce_write(qce, REG_SEG_SIZE, totallen + ctx->authsize); + else + qce_write(qce, REG_SEG_SIZE, totallen); + + /* get little endianness */ + config = qce_config_reg(qce, 1); + qce_write(qce, REG_CONFIG, config); + + /* Start the process */ + qce_crypto_go(qce, !IS_CCM(flags)); + + return 0; +} +#endif + int qce_start(struct crypto_async_request *async_req, u32 type) { switch (type) { @@ -396,6 +550,10 @@ int qce_start(struct crypto_async_request *async_req, u32 type) #ifdef CONFIG_CRYPTO_DEV_QCE_SHA case CRYPTO_ALG_TYPE_AHASH: return qce_setup_regs_ahash(async_req); +#endif +#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD + case CRYPTO_ALG_TYPE_AEAD: + return qce_setup_regs_aead(async_req); #endif default: return -EINVAL;
Add register programming sequence for enabling AEAD algorithms on the Qualcomm crypto engine. Signed-off-by: Thara Gopinath <thara.gopinath@linaro.org> --- v2->v3: - Made qce_be32_to_cpu_array truly be32 to cpu endian by using be32_to_cpup instead of cpu_to_be32p. Also remove the (u32 *) typecasting of arrays obtained as output from qce_be32_to_cpu_array as per Bjorn's review comments. - Wrapped newly introduced std_iv_sha1, std_iv_sha256 and qce_be32_to_cpu_array in CONFIG_CRYPTO_DEV_QCE_AEAD to prevent W1 warnings as reported by kernel test robot <lkp@intel.com>. v1->v2: - Minor fixes like removing not needed initializing of variables and using bool values in lieu of 0 and 1 as pointed out by Bjorn. - Introduced qce_be32_to_cpu_array which converts the u8 string in big endian order to an array of u32 and returns the total number of words, as per Bjorn's review comments. Presently this function is used only by qce_setup_regs_aead to format keys, iv and nonce. Cipher and hash algorithms can be made to use this function as a separate clean up patch. drivers/crypto/qce/common.c | 162 +++++++++++++++++++++++++++++++++++- 1 file changed, 160 insertions(+), 2 deletions(-)