apm821xx: backport and reassign crypto4xx patches
This patch backports several patches that went upstream into Herbert Xu's
cryptodev-2.6 tree:

  crypto: Use zeroing memory allocator instead of allocator/memset
  crypto: crypto4xx - performance optimizations
  crypto: crypto4xx - convert to skcipher
  crypto: crypto4xx - avoid VLA use
  crypto: crypto4xx - add aes-ctr support
  crypto: crypto4xx - properly set IV after de- and encrypt
  crypto: crypto4xx - extend aead fallback checks
  crypto: crypto4xx - put temporary dst sg into request ctx

The older, outstanding patches from the 120-wxyz series have been upstreamed
as well and have therefore been reassigned to fit into the series.

Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
parent e6e51ce87f, commit 16e39624b7
13 changed files with 1268 additions and 12 deletions
|
@ -0,0 +1,39 @@
|
|||
From 75d68369b544acc5d14c18a827654dfff248d09d Mon Sep 17 00:00:00 2001
|
||||
From: Himanshu Jha <himanshujha199640@gmail.com>
|
||||
Date: Sun, 31 Dec 2017 17:54:23 +0530
|
||||
Subject: [PATCH 1/8] crypto: Use zeroing memory allocator instead of
|
||||
allocator/memset
|
||||
|
||||
Use dma_zalloc_coherent for allocating zeroed
|
||||
memory and remove unnecessary memset function.
|
||||
|
||||
Done using Coccinelle.
|
||||
Generated-by: scripts/coccinelle/api/alloc/kzalloc-simple.cocci
|
||||
0-day tested with no failures.
|
||||
|
||||
Signed-off-by: Himanshu Jha <himanshujha199640@gmail.com>
|
||||
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
|
||||
---
|
||||
drivers/crypto/amcc/crypto4xx_core.c | 8 +++-----
|
||||
1 file changed, 3 insertions(+), 5 deletions(-)
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.c
|
||||
@@ -282,14 +282,12 @@ static u32 crypto4xx_put_pd_to_pdr(struc
|
||||
*/
|
||||
static u32 crypto4xx_build_gdr(struct crypto4xx_device *dev)
|
||||
{
|
||||
- dev->gdr = dma_alloc_coherent(dev->core_dev->device,
|
||||
- sizeof(struct ce_gd) * PPC4XX_NUM_GD,
|
||||
- &dev->gdr_pa, GFP_ATOMIC);
|
||||
+ dev->gdr = dma_zalloc_coherent(dev->core_dev->device,
|
||||
+ sizeof(struct ce_gd) * PPC4XX_NUM_GD,
|
||||
+ &dev->gdr_pa, GFP_ATOMIC);
|
||||
if (!dev->gdr)
|
||||
return -ENOMEM;
|
||||
|
||||
- memset(dev->gdr, 0, sizeof(struct ce_gd) * PPC4XX_NUM_GD);
|
||||
-
|
||||
return 0;
|
||||
}
|
||||
|
|
@ -1,13 +1,14 @@
|
|||
-From 30afcbb01a750a1ef0cee8a0861a347912c2e4fb Mon Sep 17 00:00:00 2001
+From a8d79d7bfb14f471914017103ee2329a74e5e89d Mon Sep 17 00:00:00 2001
 From: Christian Lamparter <chunkeey@gmail.com>
-Date: Thu, 21 Dec 2017 16:00:01 +0100
-Subject: [PATCH 6/6] crypto: crypto4xx - performance optimizations
+Date: Thu, 19 Apr 2018 18:41:51 +0200
+Subject: crypto: crypto4xx - performance optimizations
|
||||
|
||||
This patch provides a cheap 2MiB/s+ (~ 6%) performance
|
||||
improvement over the current code. This is because the
|
||||
compiler can now optimize several endian swap memcpy.
|
||||
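As a rough illustration of the effect described above (a hypothetical helper,
not the driver's actual code): once the copy length is a compile-time
constant, the compiler can unroll a word-swapping copy loop into a few plain
load/store pairs instead of calling out to memcpy.

	#include <stdint.h>

	/* sketch: copy n 32-bit words while byte-swapping them */
	static inline void copy_swab32(uint32_t *dst, const uint32_t *src,
				       unsigned int n)
	{
		while (n--)
			*dst++ = __builtin_bswap32(*src++);
	}

	/* with n fixed at e.g. 4 (one 16-byte AES IV), the loop above is
	 * typically unrolled into four byte-swapped loads and stores */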
|
||||
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
||||
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
|
||||
---
|
||||
drivers/crypto/amcc/crypto4xx_alg.c | 32 +++++++++++++++++++-------------
|
||||
drivers/crypto/amcc/crypto4xx_core.c | 22 +++++++++++-----------
|
||||
|
@ -70,7 +71,7 @@ Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
|||
/**
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.c
|
||||
@@ -582,7 +582,7 @@ static void crypto4xx_aead_done(struct c
|
||||
@@ -580,7 +580,7 @@ static void crypto4xx_aead_done(struct c
|
||||
struct scatterlist *dst = pd_uinfo->dest_va;
|
||||
size_t cp_len = crypto_aead_authsize(
|
||||
crypto_aead_reqtfm(aead_req));
|
||||
|
@ -79,7 +80,7 @@ Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
|||
int err = 0;
|
||||
|
||||
if (pd_uinfo->using_sd) {
|
||||
@@ -597,7 +597,7 @@ static void crypto4xx_aead_done(struct c
|
||||
@@ -595,7 +595,7 @@ static void crypto4xx_aead_done(struct c
|
||||
if (pd_uinfo->sa_va->sa_command_0.bf.dir == DIR_OUTBOUND) {
|
||||
/* append icv at the end */
|
||||
crypto4xx_memcpy_from_le32(icv, pd_uinfo->sr_va->save_digest,
|
||||
|
@ -88,7 +89,7 @@ Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
|||
|
||||
scatterwalk_map_and_copy(icv, dst, aead_req->cryptlen,
|
||||
cp_len, 1);
|
||||
@@ -607,7 +607,7 @@ static void crypto4xx_aead_done(struct c
|
||||
@@ -605,7 +605,7 @@ static void crypto4xx_aead_done(struct c
|
||||
aead_req->assoclen + aead_req->cryptlen -
|
||||
cp_len, cp_len, 0);
|
||||
|
||||
|
@ -97,18 +98,18 @@ Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
|||
|
||||
if (crypto_memneq(icv, pd_uinfo->sr_va->save_digest, cp_len))
|
||||
err = -EBADMSG;
|
||||
@@ -1124,8 +1124,8 @@ static struct crypto4xx_alg_common crypt
|
||||
@@ -1122,8 +1122,8 @@ static struct crypto4xx_alg_common crypt
|
||||
.max_keysize = AES_MAX_KEY_SIZE,
|
||||
.ivsize = AES_IV_SIZE,
|
||||
.setkey = crypto4xx_setkey_aes_cbc,
|
||||
- .encrypt = crypto4xx_encrypt,
|
||||
- .decrypt = crypto4xx_decrypt,
|
||||
+ .encrypt = crypto4xx_encrypt_iv,
|
||||
+ .decrypt = crypto4xx_decrypt_iv,
|
||||
+ .encrypt = crypto4xx_encrypt_iv,
|
||||
+ .decrypt = crypto4xx_decrypt_iv,
|
||||
}
|
||||
}
|
||||
}},
|
||||
@@ -1148,8 +1148,8 @@ static struct crypto4xx_alg_common crypt
|
||||
@@ -1146,8 +1146,8 @@ static struct crypto4xx_alg_common crypt
|
||||
.max_keysize = AES_MAX_KEY_SIZE,
|
||||
.ivsize = AES_IV_SIZE,
|
||||
.setkey = crypto4xx_setkey_aes_cfb,
|
||||
|
@ -119,7 +120,7 @@ Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
|||
}
|
||||
}
|
||||
} },
|
||||
@@ -1197,8 +1197,8 @@ static struct crypto4xx_alg_common crypt
|
||||
@@ -1195,8 +1195,8 @@ static struct crypto4xx_alg_common crypt
|
||||
.min_keysize = AES_MIN_KEY_SIZE,
|
||||
.max_keysize = AES_MAX_KEY_SIZE,
|
||||
.setkey = crypto4xx_setkey_aes_ecb,
|
||||
|
@ -130,7 +131,7 @@ Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
|||
}
|
||||
}
|
||||
} },
|
||||
@@ -1221,8 +1221,8 @@ static struct crypto4xx_alg_common crypt
|
||||
@@ -1219,8 +1219,8 @@ static struct crypto4xx_alg_common crypt
|
||||
.max_keysize = AES_MAX_KEY_SIZE,
|
||||
.ivsize = AES_IV_SIZE,
|
||||
.setkey = crypto4xx_setkey_aes_ofb,
|
|
@ -0,0 +1,578 @@
|
|||
From ce05ffe10457bda487fa049016a6ba79934bdece Mon Sep 17 00:00:00 2001
|
||||
From: Christian Lamparter <chunkeey@gmail.com>
|
||||
Date: Thu, 19 Apr 2018 18:41:52 +0200
|
||||
Subject: [PATCH 3/8] crypto: crypto4xx - convert to skcipher
|
||||
|
||||
The ablkcipher APIs have been effectively deprecated since [1].
|
||||
This patch converts the crypto4xx driver to the new skcipher APIs.
|
||||
|
||||
[1] <https://www.spinics.net/lists/linux-crypto/msg18133.html>
|
||||
|
||||
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
||||
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
|
||||
---
|
||||
drivers/crypto/amcc/crypto4xx_alg.c | 60 ++++---
|
||||
drivers/crypto/amcc/crypto4xx_core.c | 255 +++++++++++++--------------
|
||||
drivers/crypto/amcc/crypto4xx_core.h | 25 +--
|
||||
3 files changed, 163 insertions(+), 177 deletions(-)
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
@@ -31,6 +31,7 @@
|
||||
#include <crypto/gcm.h>
|
||||
#include <crypto/sha.h>
|
||||
#include <crypto/ctr.h>
|
||||
+#include <crypto/skcipher.h>
|
||||
#include "crypto4xx_reg_def.h"
|
||||
#include "crypto4xx_core.h"
|
||||
#include "crypto4xx_sa.h"
|
||||
@@ -74,36 +75,37 @@ static void set_dynamic_sa_command_1(str
|
||||
sa->sa_command_1.bf.copy_hdr = cp_hdr;
|
||||
}
|
||||
|
||||
-static inline int crypto4xx_crypt(struct ablkcipher_request *req,
|
||||
+static inline int crypto4xx_crypt(struct skcipher_request *req,
|
||||
const unsigned int ivlen, bool decrypt)
|
||||
{
|
||||
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
||||
+ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
|
||||
__le32 iv[ivlen];
|
||||
|
||||
if (ivlen)
|
||||
- crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
|
||||
+ crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
- req->nbytes, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
|
||||
+ req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
|
||||
ctx->sa_len, 0);
|
||||
}
|
||||
|
||||
-int crypto4xx_encrypt_noiv(struct ablkcipher_request *req)
|
||||
+int crypto4xx_encrypt_noiv(struct skcipher_request *req)
|
||||
{
|
||||
return crypto4xx_crypt(req, 0, false);
|
||||
}
|
||||
|
||||
-int crypto4xx_encrypt_iv(struct ablkcipher_request *req)
|
||||
+int crypto4xx_encrypt_iv(struct skcipher_request *req)
|
||||
{
|
||||
return crypto4xx_crypt(req, AES_IV_SIZE, false);
|
||||
}
|
||||
|
||||
-int crypto4xx_decrypt_noiv(struct ablkcipher_request *req)
|
||||
+int crypto4xx_decrypt_noiv(struct skcipher_request *req)
|
||||
{
|
||||
return crypto4xx_crypt(req, 0, true);
|
||||
}
|
||||
|
||||
-int crypto4xx_decrypt_iv(struct ablkcipher_request *req)
|
||||
+int crypto4xx_decrypt_iv(struct skcipher_request *req)
|
||||
{
|
||||
return crypto4xx_crypt(req, AES_IV_SIZE, true);
|
||||
}
|
||||
@@ -111,20 +113,19 @@ int crypto4xx_decrypt_iv(struct ablkciph
|
||||
/**
|
||||
* AES Functions
|
||||
*/
|
||||
-static int crypto4xx_setkey_aes(struct crypto_ablkcipher *cipher,
|
||||
+static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
|
||||
const u8 *key,
|
||||
unsigned int keylen,
|
||||
unsigned char cm,
|
||||
u8 fb)
|
||||
{
|
||||
- struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
|
||||
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
|
||||
struct dynamic_sa_ctl *sa;
|
||||
int rc;
|
||||
|
||||
if (keylen != AES_KEYSIZE_256 &&
|
||||
keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
|
||||
- crypto_ablkcipher_set_flags(cipher,
|
||||
+ crypto_skcipher_set_flags(cipher,
|
||||
CRYPTO_TFM_RES_BAD_KEY_LEN);
|
||||
return -EINVAL;
|
||||
}
|
||||
@@ -164,39 +165,38 @@ static int crypto4xx_setkey_aes(struct c
|
||||
return 0;
|
||||
}
|
||||
|
||||
-int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen)
|
||||
{
|
||||
return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
|
||||
CRYPTO_FEEDBACK_MODE_NO_FB);
|
||||
}
|
||||
|
||||
-int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen)
|
||||
{
|
||||
return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
|
||||
CRYPTO_FEEDBACK_MODE_128BIT_CFB);
|
||||
}
|
||||
|
||||
-int crypto4xx_setkey_aes_ecb(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen)
|
||||
{
|
||||
return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
|
||||
CRYPTO_FEEDBACK_MODE_NO_FB);
|
||||
}
|
||||
|
||||
-int crypto4xx_setkey_aes_ofb(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen)
|
||||
{
|
||||
return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
|
||||
CRYPTO_FEEDBACK_MODE_64BIT_OFB);
|
||||
}
|
||||
|
||||
-int crypto4xx_setkey_rfc3686(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen)
|
||||
{
|
||||
- struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
|
||||
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
|
||||
int rc;
|
||||
|
||||
rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
|
||||
@@ -210,31 +210,33 @@ int crypto4xx_setkey_rfc3686(struct cryp
|
||||
return 0;
|
||||
}
|
||||
|
||||
-int crypto4xx_rfc3686_encrypt(struct ablkcipher_request *req)
|
||||
+int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
|
||||
{
|
||||
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
||||
+ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
|
||||
__le32 iv[AES_IV_SIZE / 4] = {
|
||||
ctx->iv_nonce,
|
||||
- cpu_to_le32p((u32 *) req->info),
|
||||
- cpu_to_le32p((u32 *) (req->info + 4)),
|
||||
+ cpu_to_le32p((u32 *) req->iv),
|
||||
+ cpu_to_le32p((u32 *) (req->iv + 4)),
|
||||
cpu_to_le32(1) };
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
- req->nbytes, iv, AES_IV_SIZE,
|
||||
+ req->cryptlen, iv, AES_IV_SIZE,
|
||||
ctx->sa_out, ctx->sa_len, 0);
|
||||
}
|
||||
|
||||
-int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
|
||||
+int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
|
||||
{
|
||||
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
||||
+ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
|
||||
__le32 iv[AES_IV_SIZE / 4] = {
|
||||
ctx->iv_nonce,
|
||||
- cpu_to_le32p((u32 *) req->info),
|
||||
- cpu_to_le32p((u32 *) (req->info + 4)),
|
||||
+ cpu_to_le32p((u32 *) req->iv),
|
||||
+ cpu_to_le32p((u32 *) (req->iv + 4)),
|
||||
cpu_to_le32(1) };
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
- req->nbytes, iv, AES_IV_SIZE,
|
||||
+ req->cryptlen, iv, AES_IV_SIZE,
|
||||
ctx->sa_out, ctx->sa_len, 0);
|
||||
}
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.c
|
||||
@@ -41,6 +41,7 @@
|
||||
#include <crypto/gcm.h>
|
||||
#include <crypto/sha.h>
|
||||
#include <crypto/scatterwalk.h>
|
||||
+#include <crypto/skcipher.h>
|
||||
#include <crypto/internal/aead.h>
|
||||
#include <crypto/internal/skcipher.h>
|
||||
#include "crypto4xx_reg_def.h"
|
||||
@@ -526,21 +527,19 @@ static void crypto4xx_ret_sg_desc(struct
|
||||
}
|
||||
}
|
||||
|
||||
-static void crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
|
||||
+static void crypto4xx_cipher_done(struct crypto4xx_device *dev,
|
||||
struct pd_uinfo *pd_uinfo,
|
||||
struct ce_pd *pd)
|
||||
{
|
||||
- struct crypto4xx_ctx *ctx;
|
||||
- struct ablkcipher_request *ablk_req;
|
||||
+ struct skcipher_request *req;
|
||||
struct scatterlist *dst;
|
||||
dma_addr_t addr;
|
||||
|
||||
- ablk_req = ablkcipher_request_cast(pd_uinfo->async_req);
|
||||
- ctx = crypto_tfm_ctx(ablk_req->base.tfm);
|
||||
+ req = skcipher_request_cast(pd_uinfo->async_req);
|
||||
|
||||
if (pd_uinfo->using_sd) {
|
||||
- crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo, ablk_req->nbytes,
|
||||
- ablk_req->dst);
|
||||
+ crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo,
|
||||
+ req->cryptlen, req->dst);
|
||||
} else {
|
||||
dst = pd_uinfo->dest_va;
|
||||
addr = dma_map_page(dev->core_dev->device, sg_page(dst),
|
||||
@@ -549,8 +548,8 @@ static void crypto4xx_ablkcipher_done(st
|
||||
crypto4xx_ret_sg_desc(dev, pd_uinfo);
|
||||
|
||||
if (pd_uinfo->state & PD_ENTRY_BUSY)
|
||||
- ablkcipher_request_complete(ablk_req, -EINPROGRESS);
|
||||
- ablkcipher_request_complete(ablk_req, 0);
|
||||
+ skcipher_request_complete(req, -EINPROGRESS);
|
||||
+ skcipher_request_complete(req, 0);
|
||||
}
|
||||
|
||||
static void crypto4xx_ahash_done(struct crypto4xx_device *dev,
|
||||
@@ -641,8 +640,8 @@ static void crypto4xx_pd_done(struct cry
|
||||
struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
|
||||
|
||||
switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
|
||||
- case CRYPTO_ALG_TYPE_ABLKCIPHER:
|
||||
- crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
|
||||
+ case CRYPTO_ALG_TYPE_SKCIPHER:
|
||||
+ crypto4xx_cipher_done(dev, pd_uinfo, pd);
|
||||
break;
|
||||
case CRYPTO_ALG_TYPE_AEAD:
|
||||
crypto4xx_aead_done(dev, pd_uinfo, pd);
|
||||
@@ -936,15 +935,14 @@ static void crypto4xx_ctx_init(struct cr
|
||||
ctx->sa_len = 0;
|
||||
}
|
||||
|
||||
-static int crypto4xx_ablk_init(struct crypto_tfm *tfm)
|
||||
+static int crypto4xx_sk_init(struct crypto_skcipher *sk)
|
||||
{
|
||||
- struct crypto_alg *alg = tfm->__crt_alg;
|
||||
+ struct skcipher_alg *alg = crypto_skcipher_alg(sk);
|
||||
struct crypto4xx_alg *amcc_alg;
|
||||
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(sk);
|
||||
|
||||
amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
|
||||
crypto4xx_ctx_init(amcc_alg, ctx);
|
||||
- tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -953,9 +951,11 @@ static void crypto4xx_common_exit(struct
|
||||
crypto4xx_free_sa(ctx);
|
||||
}
|
||||
|
||||
-static void crypto4xx_ablk_exit(struct crypto_tfm *tfm)
|
||||
+static void crypto4xx_sk_exit(struct crypto_skcipher *sk)
|
||||
{
|
||||
- crypto4xx_common_exit(crypto_tfm_ctx(tfm));
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(sk);
|
||||
+
|
||||
+ crypto4xx_common_exit(ctx);
|
||||
}
|
||||
|
||||
static int crypto4xx_aead_init(struct crypto_aead *tfm)
|
||||
@@ -1012,7 +1012,7 @@ static int crypto4xx_register_alg(struct
|
||||
break;
|
||||
|
||||
default:
|
||||
- rc = crypto_register_alg(&alg->alg.u.cipher);
|
||||
+ rc = crypto_register_skcipher(&alg->alg.u.cipher);
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -1041,7 +1041,7 @@ static void crypto4xx_unregister_alg(str
|
||||
break;
|
||||
|
||||
default:
|
||||
- crypto_unregister_alg(&alg->alg.u.cipher);
|
||||
+ crypto_unregister_skcipher(&alg->alg.u.cipher);
|
||||
}
|
||||
kfree(alg);
|
||||
}
|
||||
@@ -1103,126 +1103,109 @@ static irqreturn_t crypto4xx_ce_interrup
|
||||
*/
|
||||
static struct crypto4xx_alg_common crypto4xx_alg[] = {
|
||||
/* Crypto AES modes */
|
||||
- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
|
||||
- .cra_name = "cbc(aes)",
|
||||
- .cra_driver_name = "cbc-aes-ppc4xx",
|
||||
- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
|
||||
- CRYPTO_ALG_ASYNC |
|
||||
- CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
- .cra_blocksize = AES_BLOCK_SIZE,
|
||||
- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
- .cra_type = &crypto_ablkcipher_type,
|
||||
- .cra_init = crypto4xx_ablk_init,
|
||||
- .cra_exit = crypto4xx_ablk_exit,
|
||||
- .cra_module = THIS_MODULE,
|
||||
- .cra_u = {
|
||||
- .ablkcipher = {
|
||||
- .min_keysize = AES_MIN_KEY_SIZE,
|
||||
- .max_keysize = AES_MAX_KEY_SIZE,
|
||||
- .ivsize = AES_IV_SIZE,
|
||||
- .setkey = crypto4xx_setkey_aes_cbc,
|
||||
- .encrypt = crypto4xx_encrypt_iv,
|
||||
- .decrypt = crypto4xx_decrypt_iv,
|
||||
- }
|
||||
- }
|
||||
- }},
|
||||
- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
|
||||
- .cra_name = "cfb(aes)",
|
||||
- .cra_driver_name = "cfb-aes-ppc4xx",
|
||||
- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
|
||||
- CRYPTO_ALG_ASYNC |
|
||||
- CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
- .cra_blocksize = AES_BLOCK_SIZE,
|
||||
- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
- .cra_type = &crypto_ablkcipher_type,
|
||||
- .cra_init = crypto4xx_ablk_init,
|
||||
- .cra_exit = crypto4xx_ablk_exit,
|
||||
- .cra_module = THIS_MODULE,
|
||||
- .cra_u = {
|
||||
- .ablkcipher = {
|
||||
- .min_keysize = AES_MIN_KEY_SIZE,
|
||||
- .max_keysize = AES_MAX_KEY_SIZE,
|
||||
- .ivsize = AES_IV_SIZE,
|
||||
- .setkey = crypto4xx_setkey_aes_cfb,
|
||||
- .encrypt = crypto4xx_encrypt_iv,
|
||||
- .decrypt = crypto4xx_decrypt_iv,
|
||||
- }
|
||||
- }
|
||||
+ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
|
||||
+ .base = {
|
||||
+ .cra_name = "cbc(aes)",
|
||||
+ .cra_driver_name = "cbc-aes-ppc4xx",
|
||||
+ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
|
||||
+ CRYPTO_ALG_ASYNC |
|
||||
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
+ .cra_blocksize = AES_BLOCK_SIZE,
|
||||
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
+ .cra_module = THIS_MODULE,
|
||||
+ },
|
||||
+ .min_keysize = AES_MIN_KEY_SIZE,
|
||||
+ .max_keysize = AES_MAX_KEY_SIZE,
|
||||
+ .ivsize = AES_IV_SIZE,
|
||||
+ .setkey = crypto4xx_setkey_aes_cbc,
|
||||
+ .encrypt = crypto4xx_encrypt_iv,
|
||||
+ .decrypt = crypto4xx_decrypt_iv,
|
||||
+ .init = crypto4xx_sk_init,
|
||||
+ .exit = crypto4xx_sk_exit,
|
||||
} },
|
||||
- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
|
||||
- .cra_name = "rfc3686(ctr(aes))",
|
||||
- .cra_driver_name = "rfc3686-ctr-aes-ppc4xx",
|
||||
- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
|
||||
- CRYPTO_ALG_ASYNC |
|
||||
- CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
- .cra_blocksize = AES_BLOCK_SIZE,
|
||||
- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
- .cra_type = &crypto_ablkcipher_type,
|
||||
- .cra_init = crypto4xx_ablk_init,
|
||||
- .cra_exit = crypto4xx_ablk_exit,
|
||||
- .cra_module = THIS_MODULE,
|
||||
- .cra_u = {
|
||||
- .ablkcipher = {
|
||||
- .min_keysize = AES_MIN_KEY_SIZE +
|
||||
- CTR_RFC3686_NONCE_SIZE,
|
||||
- .max_keysize = AES_MAX_KEY_SIZE +
|
||||
- CTR_RFC3686_NONCE_SIZE,
|
||||
- .ivsize = CTR_RFC3686_IV_SIZE,
|
||||
- .setkey = crypto4xx_setkey_rfc3686,
|
||||
- .encrypt = crypto4xx_rfc3686_encrypt,
|
||||
- .decrypt = crypto4xx_rfc3686_decrypt,
|
||||
- }
|
||||
- }
|
||||
+ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
|
||||
+ .base = {
|
||||
+ .cra_name = "cfb(aes)",
|
||||
+ .cra_driver_name = "cfb-aes-ppc4xx",
|
||||
+ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
|
||||
+ CRYPTO_ALG_ASYNC |
|
||||
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
+ .cra_blocksize = AES_BLOCK_SIZE,
|
||||
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
+ .cra_module = THIS_MODULE,
|
||||
+ },
|
||||
+ .min_keysize = AES_MIN_KEY_SIZE,
|
||||
+ .max_keysize = AES_MAX_KEY_SIZE,
|
||||
+ .ivsize = AES_IV_SIZE,
|
||||
+ .setkey = crypto4xx_setkey_aes_cfb,
|
||||
+ .encrypt = crypto4xx_encrypt_iv,
|
||||
+ .decrypt = crypto4xx_decrypt_iv,
|
||||
+ .init = crypto4xx_sk_init,
|
||||
+ .exit = crypto4xx_sk_exit,
|
||||
} },
|
||||
- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
|
||||
- .cra_name = "ecb(aes)",
|
||||
- .cra_driver_name = "ecb-aes-ppc4xx",
|
||||
- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
|
||||
- CRYPTO_ALG_ASYNC |
|
||||
- CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
- .cra_blocksize = AES_BLOCK_SIZE,
|
||||
- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
- .cra_type = &crypto_ablkcipher_type,
|
||||
- .cra_init = crypto4xx_ablk_init,
|
||||
- .cra_exit = crypto4xx_ablk_exit,
|
||||
- .cra_module = THIS_MODULE,
|
||||
- .cra_u = {
|
||||
- .ablkcipher = {
|
||||
- .min_keysize = AES_MIN_KEY_SIZE,
|
||||
- .max_keysize = AES_MAX_KEY_SIZE,
|
||||
- .setkey = crypto4xx_setkey_aes_ecb,
|
||||
- .encrypt = crypto4xx_encrypt_noiv,
|
||||
- .decrypt = crypto4xx_decrypt_noiv,
|
||||
- }
|
||||
- }
|
||||
+ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
|
||||
+ .base = {
|
||||
+ .cra_name = "rfc3686(ctr(aes))",
|
||||
+ .cra_driver_name = "rfc3686-ctr-aes-ppc4xx",
|
||||
+ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
|
||||
+ CRYPTO_ALG_ASYNC |
|
||||
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
+ .cra_blocksize = AES_BLOCK_SIZE,
|
||||
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
+ .cra_module = THIS_MODULE,
|
||||
+ },
|
||||
+ .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
|
||||
+ .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
|
||||
+ .ivsize = CTR_RFC3686_IV_SIZE,
|
||||
+ .setkey = crypto4xx_setkey_rfc3686,
|
||||
+ .encrypt = crypto4xx_rfc3686_encrypt,
|
||||
+ .decrypt = crypto4xx_rfc3686_decrypt,
|
||||
+ .init = crypto4xx_sk_init,
|
||||
+ .exit = crypto4xx_sk_exit,
|
||||
} },
|
||||
- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
|
||||
- .cra_name = "ofb(aes)",
|
||||
- .cra_driver_name = "ofb-aes-ppc4xx",
|
||||
- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
|
||||
- CRYPTO_ALG_ASYNC |
|
||||
- CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
- .cra_blocksize = AES_BLOCK_SIZE,
|
||||
- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
- .cra_type = &crypto_ablkcipher_type,
|
||||
- .cra_init = crypto4xx_ablk_init,
|
||||
- .cra_exit = crypto4xx_ablk_exit,
|
||||
- .cra_module = THIS_MODULE,
|
||||
- .cra_u = {
|
||||
- .ablkcipher = {
|
||||
- .min_keysize = AES_MIN_KEY_SIZE,
|
||||
- .max_keysize = AES_MAX_KEY_SIZE,
|
||||
- .ivsize = AES_IV_SIZE,
|
||||
- .setkey = crypto4xx_setkey_aes_ofb,
|
||||
- .encrypt = crypto4xx_encrypt_iv,
|
||||
- .decrypt = crypto4xx_decrypt_iv,
|
||||
- }
|
||||
- }
|
||||
+ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
|
||||
+ .base = {
|
||||
+ .cra_name = "ecb(aes)",
|
||||
+ .cra_driver_name = "ecb-aes-ppc4xx",
|
||||
+ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
|
||||
+ CRYPTO_ALG_ASYNC |
|
||||
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
+ .cra_blocksize = AES_BLOCK_SIZE,
|
||||
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
+ .cra_module = THIS_MODULE,
|
||||
+ },
|
||||
+ .min_keysize = AES_MIN_KEY_SIZE,
|
||||
+ .max_keysize = AES_MAX_KEY_SIZE,
|
||||
+ .setkey = crypto4xx_setkey_aes_ecb,
|
||||
+ .encrypt = crypto4xx_encrypt_noiv,
|
||||
+ .decrypt = crypto4xx_decrypt_noiv,
|
||||
+ .init = crypto4xx_sk_init,
|
||||
+ .exit = crypto4xx_sk_exit,
|
||||
+ } },
|
||||
+ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
|
||||
+ .base = {
|
||||
+ .cra_name = "ofb(aes)",
|
||||
+ .cra_driver_name = "ofb-aes-ppc4xx",
|
||||
+ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
|
||||
+ CRYPTO_ALG_ASYNC |
|
||||
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
+ .cra_blocksize = AES_BLOCK_SIZE,
|
||||
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
+ .cra_module = THIS_MODULE,
|
||||
+ },
|
||||
+ .min_keysize = AES_MIN_KEY_SIZE,
|
||||
+ .max_keysize = AES_MAX_KEY_SIZE,
|
||||
+ .ivsize = AES_IV_SIZE,
|
||||
+ .setkey = crypto4xx_setkey_aes_ofb,
|
||||
+ .encrypt = crypto4xx_encrypt_iv,
|
||||
+ .decrypt = crypto4xx_decrypt_iv,
|
||||
+ .init = crypto4xx_sk_init,
|
||||
+ .exit = crypto4xx_sk_exit,
|
||||
} },
|
||||
|
||||
/* AEAD */
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.h
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.h
|
||||
@@ -25,6 +25,7 @@
|
||||
#include <linux/ratelimit.h>
|
||||
#include <crypto/internal/hash.h>
|
||||
#include <crypto/internal/aead.h>
|
||||
+#include <crypto/internal/skcipher.h>
|
||||
#include "crypto4xx_reg_def.h"
|
||||
#include "crypto4xx_sa.h"
|
||||
|
||||
@@ -134,7 +135,7 @@ struct crypto4xx_ctx {
|
||||
struct crypto4xx_alg_common {
|
||||
u32 type;
|
||||
union {
|
||||
- struct crypto_alg cipher;
|
||||
+ struct skcipher_alg cipher;
|
||||
struct ahash_alg hash;
|
||||
struct aead_alg aead;
|
||||
} u;
|
||||
@@ -158,22 +159,22 @@ int crypto4xx_build_pd(struct crypto_asy
|
||||
const struct dynamic_sa_ctl *sa,
|
||||
const unsigned int sa_len,
|
||||
const unsigned int assoclen);
|
||||
-int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
-int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
-int crypto4xx_setkey_aes_ecb(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
-int crypto4xx_setkey_aes_ofb(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
-int crypto4xx_setkey_rfc3686(struct crypto_ablkcipher *cipher,
|
||||
+int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
-int crypto4xx_encrypt_iv(struct ablkcipher_request *req);
|
||||
-int crypto4xx_decrypt_iv(struct ablkcipher_request *req);
|
||||
-int crypto4xx_encrypt_noiv(struct ablkcipher_request *req);
|
||||
-int crypto4xx_decrypt_noiv(struct ablkcipher_request *req);
|
||||
-int crypto4xx_rfc3686_encrypt(struct ablkcipher_request *req);
|
||||
-int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req);
|
||||
+int crypto4xx_encrypt_iv(struct skcipher_request *req);
|
||||
+int crypto4xx_decrypt_iv(struct skcipher_request *req);
|
||||
+int crypto4xx_encrypt_noiv(struct skcipher_request *req);
|
||||
+int crypto4xx_decrypt_noiv(struct skcipher_request *req);
|
||||
+int crypto4xx_rfc3686_encrypt(struct skcipher_request *req);
|
||||
+int crypto4xx_rfc3686_decrypt(struct skcipher_request *req);
|
||||
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm);
|
||||
int crypto4xx_hash_digest(struct ahash_request *req);
|
||||
int crypto4xx_hash_final(struct ahash_request *req);
|
|
@ -0,0 +1,61 @@
|
|||
From c4e90650ff0cbf123ec9cfc32026fa0fb2931658 Mon Sep 17 00:00:00 2001
|
||||
From: Christian Lamparter <chunkeey@gmail.com>
|
||||
Date: Thu, 19 Apr 2018 18:41:53 +0200
|
||||
Subject: [PATCH 4/8] crypto: crypto4xx - avoid VLA use
|
||||
|
||||
This patch fixes some of the -Wvla warnings.
|
||||
|
||||
crypto4xx_alg.c:83:19: warning: Variable length array is used.
|
||||
crypto4xx_alg.c:273:56: warning: Variable length array is used.
|
||||
crypto4xx_alg.c:380:32: warning: Variable length array is used.
|
||||
|
||||
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
||||
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
|
||||
---
|
||||
drivers/crypto/amcc/crypto4xx_alg.c | 14 ++++----------
|
||||
1 file changed, 4 insertions(+), 10 deletions(-)
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
@@ -80,7 +80,7 @@ static inline int crypto4xx_crypt(struct
|
||||
{
|
||||
struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
|
||||
struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
|
||||
- __le32 iv[ivlen];
|
||||
+ __le32 iv[AES_IV_SIZE];
|
||||
|
||||
if (ivlen)
|
||||
crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);
|
||||
@@ -270,13 +270,7 @@ static inline bool crypto4xx_aead_need_f
|
||||
static int crypto4xx_aead_fallback(struct aead_request *req,
|
||||
struct crypto4xx_ctx *ctx, bool do_decrypt)
|
||||
{
|
||||
- char aead_req_data[sizeof(struct aead_request) +
|
||||
- crypto_aead_reqsize(ctx->sw_cipher.aead)]
|
||||
- __aligned(__alignof__(struct aead_request));
|
||||
-
|
||||
- struct aead_request *subreq = (void *) aead_req_data;
|
||||
-
|
||||
- memset(subreq, 0, sizeof(aead_req_data));
|
||||
+ struct aead_request *subreq = aead_request_ctx(req);
|
||||
|
||||
aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
|
||||
aead_request_set_callback(subreq, req->base.flags,
|
||||
@@ -377,7 +371,7 @@ static int crypto4xx_crypt_aes_ccm(struc
|
||||
struct crypto_aead *aead = crypto_aead_reqtfm(req);
|
||||
unsigned int len = req->cryptlen;
|
||||
__le32 iv[16];
|
||||
- u32 tmp_sa[ctx->sa_len * 4];
|
||||
+ u32 tmp_sa[SA_AES128_CCM_LEN + 4];
|
||||
struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
|
||||
|
||||
if (crypto4xx_aead_need_fallback(req, true, decrypt))
|
||||
@@ -386,7 +380,7 @@ static int crypto4xx_crypt_aes_ccm(struc
|
||||
if (decrypt)
|
||||
len -= crypto_aead_authsize(aead);
|
||||
|
||||
- memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, sizeof(tmp_sa));
|
||||
+ memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
|
||||
sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;
|
||||
|
||||
if (req->iv[0] == 1) {
|
|
@ -0,0 +1,247 @@
|
|||
From 98e87e3d933b8e504ea41b8857c038d2cd06cddc Mon Sep 17 00:00:00 2001
|
||||
From: Christian Lamparter <chunkeey@gmail.com>
|
||||
Date: Thu, 19 Apr 2018 18:41:54 +0200
|
||||
Subject: [PATCH 5/8] crypto: crypto4xx - add aes-ctr support
|
||||
|
||||
This patch adds support for the aes-ctr skcipher.
|
||||
|
||||
name : ctr(aes)
|
||||
driver : ctr-aes-ppc4xx
|
||||
module : crypto4xx
|
||||
priority : 300
|
||||
refcnt : 1
|
||||
selftest : passed
|
||||
internal : no
|
||||
type : skcipher
|
||||
async : yes
|
||||
blocksize : 16
|
||||
min keysize : 16
|
||||
max keysize : 32
|
||||
ivsize : 16
|
||||
chunksize : 16
|
||||
walksize : 16
|
||||
|
||||
The hardware uses only the last 32-bits as the counter while the
|
||||
kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
|
||||
the whole IV is a counter. To make this work, the driver will
|
||||
fall back if the counter is going to overflow.
|
||||
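To make the wraparound check concrete (illustrative values, not taken from a
real run): with a 32-bit counter, overflow is detected by testing whether the
unsigned sum wraps below one of its operands.

	#include <stdint.h>
	#include <stdbool.h>

	/* sketch of the driver's overflow test: true if counter + nblks no
	 * longer fits in the 32-bit counter field */
	static bool ctr_would_wrap(uint32_t counter, uint32_t nblks)
	{
		return counter + nblks < counter;
	}

	/* e.g. ctr_would_wrap(0xfffffffe, 4) is true, so the request is
	 * handed to the software fallback instead */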
|
||||
The aead's crypto4xx_setup_fallback() function is renamed to
|
||||
crypto4xx_aead_setup_fallback.
|
||||
|
||||
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
||||
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
|
||||
---
|
||||
drivers/crypto/amcc/crypto4xx_alg.c | 91 ++++++++++++++++++++++++++--
|
||||
drivers/crypto/amcc/crypto4xx_core.c | 37 +++++++++++
|
||||
drivers/crypto/amcc/crypto4xx_core.h | 5 ++
|
||||
3 files changed, 127 insertions(+), 6 deletions(-)
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
@@ -240,6 +240,85 @@ int crypto4xx_rfc3686_decrypt(struct skc
|
||||
ctx->sa_out, ctx->sa_len, 0);
|
||||
}
|
||||
|
||||
+static int
|
||||
+crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
|
||||
+{
|
||||
+ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
|
||||
+ size_t iv_len = crypto_skcipher_ivsize(cipher);
|
||||
+ unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
|
||||
+ unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
|
||||
+ AES_BLOCK_SIZE;
|
||||
+
|
||||
+ /*
|
||||
+ * The hardware uses only the last 32-bits as the counter while the
|
||||
+ * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
|
||||
+ * the whole IV is a counter. So fallback if the counter is going to
|
||||
+ * overlow.
|
||||
+ */
|
||||
+ if (counter + nblks < counter) {
|
||||
+ struct skcipher_request *subreq = skcipher_request_ctx(req);
|
||||
+ int ret;
|
||||
+
|
||||
+ skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
|
||||
+ skcipher_request_set_callback(subreq, req->base.flags,
|
||||
+ NULL, NULL);
|
||||
+ skcipher_request_set_crypt(subreq, req->src, req->dst,
|
||||
+ req->cryptlen, req->iv);
|
||||
+ ret = encrypt ? crypto_skcipher_encrypt(subreq)
|
||||
+ : crypto_skcipher_decrypt(subreq);
|
||||
+ skcipher_request_zero(subreq);
|
||||
+ return ret;
|
||||
+ }
|
||||
+
|
||||
+ return encrypt ? crypto4xx_encrypt_iv(req)
|
||||
+ : crypto4xx_decrypt_iv(req);
|
||||
+}
|
||||
+
|
||||
+static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
|
||||
+ struct crypto_skcipher *cipher,
|
||||
+ const u8 *key,
|
||||
+ unsigned int keylen)
|
||||
+{
|
||||
+ int rc;
|
||||
+
|
||||
+ crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
|
||||
+ CRYPTO_TFM_REQ_MASK);
|
||||
+ crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
|
||||
+ crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
|
||||
+ rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
|
||||
+ crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
|
||||
+ crypto_skcipher_set_flags(cipher,
|
||||
+ crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
|
||||
+ CRYPTO_TFM_RES_MASK);
|
||||
+
|
||||
+ return rc;
|
||||
+}
|
||||
+
|
||||
+int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
|
||||
+ const u8 *key, unsigned int keylen)
|
||||
+{
|
||||
+ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
|
||||
+ int rc;
|
||||
+
|
||||
+ rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
|
||||
+ if (rc)
|
||||
+ return rc;
|
||||
+
|
||||
+ return crypto4xx_setkey_aes(cipher, key, keylen,
|
||||
+ CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
|
||||
+}
|
||||
+
|
||||
+int crypto4xx_encrypt_ctr(struct skcipher_request *req)
|
||||
+{
|
||||
+ return crypto4xx_ctr_crypt(req, true);
|
||||
+}
|
||||
+
|
||||
+int crypto4xx_decrypt_ctr(struct skcipher_request *req)
|
||||
+{
|
||||
+ return crypto4xx_ctr_crypt(req, false);
|
||||
+}
|
||||
+
|
||||
static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
|
||||
bool is_ccm, bool decrypt)
|
||||
{
|
||||
@@ -282,10 +361,10 @@ static int crypto4xx_aead_fallback(struc
|
||||
crypto_aead_encrypt(subreq);
|
||||
}
|
||||
|
||||
-static int crypto4xx_setup_fallback(struct crypto4xx_ctx *ctx,
|
||||
- struct crypto_aead *cipher,
|
||||
- const u8 *key,
|
||||
- unsigned int keylen)
|
||||
+static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
|
||||
+ struct crypto_aead *cipher,
|
||||
+ const u8 *key,
|
||||
+ unsigned int keylen)
|
||||
{
|
||||
int rc;
|
||||
|
||||
@@ -313,7 +392,7 @@ int crypto4xx_setkey_aes_ccm(struct cryp
|
||||
struct dynamic_sa_ctl *sa;
|
||||
int rc = 0;
|
||||
|
||||
- rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
|
||||
+ rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
|
||||
if (rc)
|
||||
return rc;
|
||||
|
||||
@@ -472,7 +551,7 @@ int crypto4xx_setkey_aes_gcm(struct cryp
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
- rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
|
||||
+ rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
|
||||
if (rc)
|
||||
return rc;
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.c
|
||||
@@ -941,6 +941,19 @@ static int crypto4xx_sk_init(struct cryp
|
||||
struct crypto4xx_alg *amcc_alg;
|
||||
struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(sk);
|
||||
|
||||
+ if (alg->base.cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
|
||||
+ ctx->sw_cipher.cipher =
|
||||
+ crypto_alloc_skcipher(alg->base.cra_name, 0,
|
||||
+ CRYPTO_ALG_NEED_FALLBACK |
|
||||
+ CRYPTO_ALG_ASYNC);
|
||||
+ if (IS_ERR(ctx->sw_cipher.cipher))
|
||||
+ return PTR_ERR(ctx->sw_cipher.cipher);
|
||||
+
|
||||
+ crypto_skcipher_set_reqsize(sk,
|
||||
+ sizeof(struct skcipher_request) + 32 +
|
||||
+ crypto_skcipher_reqsize(ctx->sw_cipher.cipher));
|
||||
+ }
|
||||
+
|
||||
amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
|
||||
crypto4xx_ctx_init(amcc_alg, ctx);
|
||||
return 0;
|
||||
@@ -956,6 +969,8 @@ static void crypto4xx_sk_exit(struct cry
|
||||
struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(sk);
|
||||
|
||||
crypto4xx_common_exit(ctx);
|
||||
+ if (ctx->sw_cipher.cipher)
|
||||
+ crypto_free_skcipher(ctx->sw_cipher.cipher);
|
||||
}
|
||||
|
||||
static int crypto4xx_aead_init(struct crypto_aead *tfm)
|
||||
@@ -1145,6 +1160,28 @@ static struct crypto4xx_alg_common crypt
|
||||
.init = crypto4xx_sk_init,
|
||||
.exit = crypto4xx_sk_exit,
|
||||
} },
|
||||
+ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
|
||||
+ .base = {
|
||||
+ .cra_name = "ctr(aes)",
|
||||
+ .cra_driver_name = "ctr-aes-ppc4xx",
|
||||
+ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
|
||||
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
|
||||
+ CRYPTO_ALG_NEED_FALLBACK |
|
||||
+ CRYPTO_ALG_ASYNC |
|
||||
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
||||
+ .cra_blocksize = AES_BLOCK_SIZE,
|
||||
+ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
|
||||
+ .cra_module = THIS_MODULE,
|
||||
+ },
|
||||
+ .min_keysize = AES_MIN_KEY_SIZE,
|
||||
+ .max_keysize = AES_MAX_KEY_SIZE,
|
||||
+ .ivsize = AES_IV_SIZE,
|
||||
+ .setkey = crypto4xx_setkey_aes_ctr,
|
||||
+ .encrypt = crypto4xx_encrypt_ctr,
|
||||
+ .decrypt = crypto4xx_decrypt_ctr,
|
||||
+ .init = crypto4xx_sk_init,
|
||||
+ .exit = crypto4xx_sk_exit,
|
||||
+ } },
|
||||
{ .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
|
||||
.base = {
|
||||
.cra_name = "rfc3686(ctr(aes))",
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.h
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.h
|
||||
@@ -128,6 +128,7 @@ struct crypto4xx_ctx {
|
||||
__le32 iv_nonce;
|
||||
u32 sa_len;
|
||||
union {
|
||||
+ struct crypto_skcipher *cipher;
|
||||
struct crypto_aead *aead;
|
||||
} sw_cipher;
|
||||
};
|
||||
@@ -163,12 +164,16 @@ int crypto4xx_setkey_aes_cbc(struct cryp
|
||||
const u8 *key, unsigned int keylen);
|
||||
int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
+int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
|
||||
+ const u8 *key, unsigned int keylen);
|
||||
int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
+int crypto4xx_encrypt_ctr(struct skcipher_request *req);
|
||||
+int crypto4xx_decrypt_ctr(struct skcipher_request *req);
|
||||
int crypto4xx_encrypt_iv(struct skcipher_request *req);
|
||||
int crypto4xx_decrypt_iv(struct skcipher_request *req);
|
||||
int crypto4xx_encrypt_noiv(struct skcipher_request *req);
|
|
@ -0,0 +1,71 @@
|
|||
From fc340115ffb8235c1bbd200c28855e6373d0dd1a Mon Sep 17 00:00:00 2001
|
||||
From: Christian Lamparter <chunkeey@gmail.com>
|
||||
Date: Thu, 19 Apr 2018 18:41:55 +0200
|
||||
Subject: [PATCH 6/8] crypto: crypto4xx - properly set IV after de- and encrypt
|
||||
|
||||
This patch fixes cts(cbc(aes)) test when cbc-aes-ppc4xx is used.
|
||||
alg: skcipher: Test 1 failed (invalid result) on encryption for cts(cbc-aes-ppc4xx)
|
||||
00000000: 4b 10 75 fc 2f 14 1b 6a 27 35 37 33 d1 b7 70 05
|
||||
00000010: 97
|
||||
alg: skcipher: Failed to load transform for cts(cbc(aes)): -2
|
||||
|
||||
The CTS cipher mode expects the IV (req->iv) of skcipher_request
|
||||
to contain the last ciphertext block after the {en,de}crypt
|
||||
operation is complete.
|
||||
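Put schematically (a hedged sketch with hypothetical names, not the driver
code), the completion path has to hand the last ciphertext block back through
the request IV so that chained modes such as CTS can continue from it:

	#include <stdint.h>
	#include <stddef.h>
	#include <string.h>

	#define AES_BLOCK_SIZE 16

	/* sketch: after a CBC encrypt of ct_len bytes, the caller's IV buffer
	 * must end up holding the final ciphertext block */
	static void cbc_return_iv(uint8_t *req_iv, const uint8_t *ct, size_t ct_len)
	{
		memcpy(req_iv, ct + ct_len - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
	}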
|
||||
Fix this issue for the AMCC Crypto4xx hardware engine.
|
||||
The tcrypt test case for cts(cbc(aes)) is now correctly passed.
|
||||
|
||||
name : cts(cbc(aes))
|
||||
driver : cts(cbc-aes-ppc4xx)
|
||||
module : cts
|
||||
priority : 300
|
||||
refcnt : 1
|
||||
selftest : passed
|
||||
internal : no
|
||||
type : skcipher
|
||||
async : yes
|
||||
blocksize : 16
|
||||
min keysize : 16
|
||||
max keysize : 32
|
||||
ivsize : 16
|
||||
chunksize : 16
|
||||
walksize : 16
|
||||
|
||||
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
||||
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
|
||||
---
|
||||
drivers/crypto/amcc/crypto4xx_alg.c | 3 ++-
|
||||
drivers/crypto/amcc/crypto4xx_core.c | 9 +++++++++
|
||||
2 files changed, 11 insertions(+), 1 deletion(-)
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
@@ -141,7 +141,8 @@ static int crypto4xx_setkey_aes(struct c
|
||||
/* Setup SA */
|
||||
sa = ctx->sa_in;
|
||||
|
||||
- set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, SA_NOT_SAVE_IV,
|
||||
+ set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_CBC ?
|
||||
+ SA_SAVE_IV : SA_NOT_SAVE_IV),
|
||||
SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
|
||||
SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
|
||||
SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.c
|
||||
@@ -545,6 +545,15 @@ static void crypto4xx_cipher_done(struct
|
||||
addr = dma_map_page(dev->core_dev->device, sg_page(dst),
|
||||
dst->offset, dst->length, DMA_FROM_DEVICE);
|
||||
}
|
||||
+
|
||||
+ if (pd_uinfo->sa_va->sa_command_0.bf.save_iv == SA_SAVE_IV) {
|
||||
+ struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
|
||||
+
|
||||
+ crypto4xx_memcpy_from_le32((u32 *)req->iv,
|
||||
+ pd_uinfo->sr_va->save_iv,
|
||||
+ crypto_skcipher_ivsize(skcipher));
|
||||
+ }
|
||||
+
|
||||
crypto4xx_ret_sg_desc(dev, pd_uinfo);
|
||||
|
||||
if (pd_uinfo->state & PD_ENTRY_BUSY)
|
|
@ -0,0 +1,102 @@
|
|||
From 584201f1895d915c1aa523bc86afdc126e94beca Mon Sep 17 00:00:00 2001
|
||||
From: Christian Lamparter <chunkeey@gmail.com>
|
||||
Date: Thu, 19 Apr 2018 18:41:56 +0200
|
||||
Subject: [PATCH 7/8] crypto: crypto4xx - extend aead fallback checks
|
||||
|
||||
1020 bytes is the limit for associated data. Any more
|
||||
and it will no longer fit into hash_crypto_offset.
|
||||
|
||||
The hardware will not process aead requests with plaintext
|
||||
that has less than AES_BLOCK_SIZE bytes. When decrypting
|
||||
aead requests the authsize has to be taken into account as
|
||||
well, as it is part of the cryptlen. Otherwise the hardware
|
||||
will think it has been misconfigured and will return:
|
||||
|
||||
aead return err status = 0x98
|
||||
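Taken together, the main size checks look like this (a consolidating sketch;
the driver's actual tests are spread across crypto4xx_aead_need_fallback()
in the diff below, which also covers the CCM counter-field length):

	#include <stdbool.h>

	#define AES_BLOCK_SIZE 16

	/* sketch: plaintext length (cryptlen minus authsize on decrypt) and
	 * associated data length the engine can handle directly */
	static bool hw_can_handle(unsigned int plain_len, unsigned int assoclen)
	{
		return plain_len >= AES_BLOCK_SIZE &&
		       (assoclen & 0x3) == 0 &&
		       assoclen <= 1020;
	}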
|
||||
For rfc4543(gcm(aes)), the hardware has a dedicated GMAC
|
||||
mode as part of the hash function set.
|
||||
|
||||
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
||||
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
|
||||
---
|
||||
drivers/crypto/amcc/crypto4xx_alg.c | 30 +++++++++++++++--------------
|
||||
1 file changed, 16 insertions(+), 14 deletions(-)
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
@@ -321,6 +321,7 @@ int crypto4xx_decrypt_ctr(struct skciphe
|
||||
}
|
||||
|
||||
static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
|
||||
+ unsigned int len,
|
||||
bool is_ccm, bool decrypt)
|
||||
{
|
||||
struct crypto_aead *aead = crypto_aead_reqtfm(req);
|
||||
@@ -330,14 +331,14 @@ static inline bool crypto4xx_aead_need_f
|
||||
return true;
|
||||
|
||||
/*
|
||||
- * hardware does not handle cases where cryptlen
|
||||
- * is less than a block
|
||||
+ * hardware does not handle cases where plaintext
|
||||
+ * is less than a block.
|
||||
*/
|
||||
- if (req->cryptlen < AES_BLOCK_SIZE)
|
||||
+ if (len < AES_BLOCK_SIZE)
|
||||
return true;
|
||||
|
||||
- /* assoc len needs to be a multiple of 4 */
|
||||
- if (req->assoclen & 0x3)
|
||||
+ /* assoc len needs to be a multiple of 4 and <= 1020 */
|
||||
+ if (req->assoclen & 0x3 || req->assoclen > 1020)
|
||||
return true;
|
||||
|
||||
/* CCM supports only counter field length of 2 and 4 bytes */
|
||||
@@ -449,17 +450,17 @@ static int crypto4xx_crypt_aes_ccm(struc
|
||||
{
|
||||
struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
||||
struct crypto_aead *aead = crypto_aead_reqtfm(req);
|
||||
- unsigned int len = req->cryptlen;
|
||||
__le32 iv[16];
|
||||
u32 tmp_sa[SA_AES128_CCM_LEN + 4];
|
||||
struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
|
||||
-
|
||||
- if (crypto4xx_aead_need_fallback(req, true, decrypt))
|
||||
- return crypto4xx_aead_fallback(req, ctx, decrypt);
|
||||
+ unsigned int len = req->cryptlen;
|
||||
|
||||
if (decrypt)
|
||||
len -= crypto_aead_authsize(aead);
|
||||
|
||||
+ if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
|
||||
+ return crypto4xx_aead_fallback(req, ctx, decrypt);
|
||||
+
|
||||
memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
|
||||
sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;
|
||||
|
||||
@@ -605,18 +606,19 @@ static inline int crypto4xx_crypt_aes_gc
|
||||
bool decrypt)
|
||||
{
|
||||
struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
||||
- unsigned int len = req->cryptlen;
|
||||
+ struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
|
||||
__le32 iv[4];
|
||||
+ unsigned int len = req->cryptlen;
|
||||
+
|
||||
+ if (decrypt)
|
||||
+ len -= crypto_aead_authsize(crypto_aead_reqtfm(req));
|
||||
|
||||
- if (crypto4xx_aead_need_fallback(req, false, decrypt))
|
||||
+ if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
|
||||
return crypto4xx_aead_fallback(req, ctx, decrypt);
|
||||
|
||||
crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
|
||||
iv[3] = cpu_to_le32(1);
|
||||
|
||||
- if (decrypt)
|
||||
- len -= crypto_aead_authsize(crypto_aead_reqtfm(req));
|
||||
-
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
len, iv, sizeof(iv),
|
||||
decrypt ? ctx->sa_in : ctx->sa_out,
|
|
@ -0,0 +1,157 @@
|
|||
From 658c9d2b9f374c835d0348d852a3f002196628d0 Mon Sep 17 00:00:00 2001
|
||||
From: Christian Lamparter <chunkeey@gmail.com>
|
||||
Date: Thu, 19 Apr 2018 18:41:57 +0200
|
||||
Subject: [PATCH 8/8] crypto: crypto4xx - put temporary dst sg into request ctx
|
||||
|
||||
This patch fixes a crash that happens when testing rfc4543(gcm(aes))
|
||||
|
||||
Unable to handle kernel paging request for data at address 0xf59b3420
|
||||
Faulting instruction address: 0xc0012994
|
||||
Oops: Kernel access of bad area, sig: 11 [#1]
|
||||
BE PowerPC 44x Platform
|
||||
Modules linked in: tcrypt(+) crypto4xx [...]
|
||||
CPU: 0 PID: 0 Comm: swapper Tainted: G O 4.17.0-rc1+ #23
|
||||
NIP: c0012994 LR: d3077934 CTR: 06026d49
|
||||
REGS: cfff7e30 TRAP: 0300 Tainted: G O (4.17.0-rc1+)
|
||||
MSR: 00029000 <CE,EE,ME> CR: 44744822 XER: 00000000
|
||||
DEAR: f59b3420 ESR: 00000000
|
||||
NIP [c0012994] __dma_sync+0x58/0x10c
|
||||
LR [d3077934] crypto4xx_bh_tasklet_cb+0x188/0x3c8 [crypto4xx]
|
||||
|
||||
__dma_sync was fed the temporary _dst that crypto4xx_build_pd()
|
||||
had in its function stack. This clearly never worked.
|
||||
This patch therefore overhauls the code from the original driver
|
||||
and puts the temporary dst sg list into aead's request context.
|
||||
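The underlying bug pattern, shown as a generic sketch (hypothetical types,
not the driver's structures): an asynchronous consumer is handed a pointer to
an array that lives on the submitting function's stack, so by the time the
completion tasklet dereferences it the memory is gone.

	struct desc { void *page; unsigned int len; };
	struct job  { struct desc *dst; };

	/* broken: tmp_dst is on this stack frame, but job->dst is used later
	 * from the completion path, after build_broken() has returned */
	static int build_broken(struct job *job)
	{
		struct desc tmp_dst[2] = { { 0 } };

		job->dst = tmp_dst;	/* dangling once we return */
		return 0;
	}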
|
||||
Fixes: a0aae821ba3d3 ("crypto: crypto4xx - prepare for AEAD support")
|
||||
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
|
||||
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
|
||||
---
|
||||
drivers/crypto/amcc/crypto4xx_alg.c | 15 ++++++++-------
|
||||
drivers/crypto/amcc/crypto4xx_core.c | 10 +++++-----
|
||||
drivers/crypto/amcc/crypto4xx_core.h | 7 ++++++-
|
||||
3 files changed, 19 insertions(+), 13 deletions(-)
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
|
||||
@@ -87,7 +87,7 @@ static inline int crypto4xx_crypt(struct
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
|
||||
- ctx->sa_len, 0);
|
||||
+ ctx->sa_len, 0, NULL);
|
||||
}
|
||||
|
||||
int crypto4xx_encrypt_noiv(struct skcipher_request *req)
|
||||
@@ -223,7 +223,7 @@ int crypto4xx_rfc3686_encrypt(struct skc
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
req->cryptlen, iv, AES_IV_SIZE,
|
||||
- ctx->sa_out, ctx->sa_len, 0);
|
||||
+ ctx->sa_out, ctx->sa_len, 0, NULL);
|
||||
}
|
||||
|
||||
int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
|
||||
@@ -238,7 +238,7 @@ int crypto4xx_rfc3686_decrypt(struct skc
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
req->cryptlen, iv, AES_IV_SIZE,
|
||||
- ctx->sa_out, ctx->sa_len, 0);
|
||||
+ ctx->sa_out, ctx->sa_len, 0, NULL);
|
||||
}
|
||||
|
||||
static int
|
||||
@@ -449,6 +449,7 @@ int crypto4xx_setkey_aes_ccm(struct cryp
|
||||
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
|
||||
{
|
||||
struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
||||
+ struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
|
||||
struct crypto_aead *aead = crypto_aead_reqtfm(req);
|
||||
__le32 iv[16];
|
||||
u32 tmp_sa[SA_AES128_CCM_LEN + 4];
|
||||
@@ -474,7 +475,7 @@ static int crypto4xx_crypt_aes_ccm(struc
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
len, iv, sizeof(iv),
|
||||
- sa, ctx->sa_len, req->assoclen);
|
||||
+ sa, ctx->sa_len, req->assoclen, rctx->dst);
|
||||
}
|
||||
|
||||
int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
|
||||
@@ -622,7 +623,7 @@ static inline int crypto4xx_crypt_aes_gc
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
|
||||
len, iv, sizeof(iv),
|
||||
decrypt ? ctx->sa_in : ctx->sa_out,
|
||||
- ctx->sa_len, req->assoclen);
|
||||
+ ctx->sa_len, req->assoclen, rctx->dst);
|
||||
}
|
||||
|
||||
int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
|
||||
@@ -707,7 +708,7 @@ int crypto4xx_hash_update(struct ahash_r
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
|
||||
req->nbytes, NULL, 0, ctx->sa_in,
|
||||
- ctx->sa_len, 0);
|
||||
+ ctx->sa_len, 0, NULL);
|
||||
}
|
||||
|
||||
int crypto4xx_hash_final(struct ahash_request *req)
|
||||
@@ -726,7 +727,7 @@ int crypto4xx_hash_digest(struct ahash_r
|
||||
|
||||
return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
|
||||
req->nbytes, NULL, 0, ctx->sa_in,
|
||||
- ctx->sa_len, 0);
|
||||
+ ctx->sa_len, 0, NULL);
|
||||
}
|
||||
|
||||
/**
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.c
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.c
|
||||
@@ -695,9 +695,9 @@ int crypto4xx_build_pd(struct crypto_asy
|
||||
const __le32 *iv, const u32 iv_len,
|
||||
const struct dynamic_sa_ctl *req_sa,
|
||||
const unsigned int sa_len,
|
||||
- const unsigned int assoclen)
|
||||
+ const unsigned int assoclen,
|
||||
+ struct scatterlist *_dst)
|
||||
{
|
||||
- struct scatterlist _dst[2];
|
||||
struct crypto4xx_device *dev = ctx->dev;
|
||||
struct dynamic_sa_ctl *sa;
|
||||
struct ce_gd *gd;
|
||||
@@ -996,9 +996,9 @@ static int crypto4xx_aead_init(struct cr
|
||||
|
||||
amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.aead);
|
||||
crypto4xx_ctx_init(amcc_alg, ctx);
|
||||
- crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
|
||||
- max(sizeof(struct crypto4xx_ctx), 32 +
|
||||
- crypto_aead_reqsize(ctx->sw_cipher.aead)));
|
||||
+ crypto_aead_set_reqsize(tfm, max(sizeof(struct aead_request) + 32 +
|
||||
+ crypto_aead_reqsize(ctx->sw_cipher.aead),
|
||||
+ sizeof(struct crypto4xx_aead_reqctx)));
|
||||
return 0;
|
||||
}
|
||||
|
||||
--- a/drivers/crypto/amcc/crypto4xx_core.h
|
||||
+++ b/drivers/crypto/amcc/crypto4xx_core.h
|
||||
@@ -133,6 +133,10 @@ struct crypto4xx_ctx {
|
||||
} sw_cipher;
|
||||
};
|
||||
|
||||
+struct crypto4xx_aead_reqctx {
|
||||
+ struct scatterlist dst[2];
|
||||
+};
|
||||
+
|
||||
struct crypto4xx_alg_common {
|
||||
u32 type;
|
||||
union {
|
||||
@@ -159,7 +163,8 @@ int crypto4xx_build_pd(struct crypto_asy
|
||||
const __le32 *iv, const u32 iv_len,
|
||||
const struct dynamic_sa_ctl *sa,
|
||||
const unsigned int sa_len,
|
||||
- const unsigned int assoclen);
|
||||
+ const unsigned int assoclen,
|
||||
+ struct scatterlist *dst_tmp);
|
||||
int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
|
||||
const u8 *key, unsigned int keylen);
|
||||
int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
|