Commit 2f77690d authored by Christian Lamparter, committed by Herbert Xu

crypto: crypto4xx - simplify sa and state context acquisition

Thanks to the big overhaul of crypto4xx_build_pd(), the request-local
sa_in, sa_out and state_record allocation can be simplified.

There's no need to set up any DMA-coherent memory anymore, and
much of the support code can be removed.
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 4b5b7999
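For context, here is a minimal sketch of the allocation pattern the patch moves to, assuming (as the commit message implies) that crypto4xx_build_pd() now gives the hardware its own copy of the SA data, so the per-tfm buffers only need to be CPU-visible. The struct and function names below are trimmed stand-ins for the driver's crypto4xx_ctx, crypto4xx_alloc_sa() and crypto4xx_free_sa(), not the driver source itself:

#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/types.h>

/* Trimmed stand-in for struct crypto4xx_ctx: only the fields this patch keeps. */
struct sa_ctx {
        void *sa_in;    /* inbound SA, now plain kernel memory */
        void *sa_out;   /* outbound SA, now plain kernel memory */
        u32 sa_len;     /* SA length in 32-bit words */
};

/* Mirrors the post-patch crypto4xx_alloc_sa(): kzalloc() instead of dma_alloc_coherent(). */
static int sa_ctx_alloc(struct sa_ctx *ctx, u32 size)
{
        ctx->sa_in = kzalloc(size * 4, GFP_ATOMIC);
        if (!ctx->sa_in)
                return -ENOMEM;

        ctx->sa_out = kzalloc(size * 4, GFP_ATOMIC);
        if (!ctx->sa_out) {
                kfree(ctx->sa_in);
                ctx->sa_in = NULL;
                return -ENOMEM;
        }

        ctx->sa_len = size;
        return 0;
}

/* Mirrors the post-patch crypto4xx_free_sa(): kfree(NULL) is a no-op, so no checks needed. */
static void sa_ctx_free(struct sa_ctx *ctx)
{
        kfree(ctx->sa_in);
        ctx->sa_in = NULL;
        kfree(ctx->sa_out);
        ctx->sa_out = NULL;
        ctx->sa_len = 0;
}

Dropping the dma_addr_t handles also lets callers test ctx->sa_in / ctx->sa_out directly, which is exactly what the setkey hunks below switch to.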
@@ -122,20 +122,13 @@ static int crypto4xx_setkey_aes(struct crypto_ablkcipher *cipher,
 	}
 
 	/* Create SA */
-	if (ctx->sa_in_dma_addr || ctx->sa_out_dma_addr)
+	if (ctx->sa_in || ctx->sa_out)
 		crypto4xx_free_sa(ctx);
 
 	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen-16) / 4);
 	if (rc)
 		return rc;
 
-	if (ctx->state_record_dma_addr == 0) {
-		rc = crypto4xx_alloc_state_record(ctx);
-		if (rc) {
-			crypto4xx_free_sa(ctx);
-			return rc;
-		}
-	}
 	/* Setup SA */
 	sa = ctx->sa_in;
 
@@ -203,8 +196,8 @@ int crypto4xx_setkey_rfc3686(struct crypto_ablkcipher *cipher,
 	if (rc)
 		return rc;
 
-	crypto4xx_memcpy_to_le32(ctx->state_record->save_iv,
-		key + keylen - CTR_RFC3686_NONCE_SIZE, CTR_RFC3686_NONCE_SIZE);
+	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
+			CTR_RFC3686_NONCE_SIZE]);
 
 	return 0;
 }
@@ -213,7 +206,7 @@ int crypto4xx_rfc3686_encrypt(struct ablkcipher_request *req)
 {
 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
 	__le32 iv[AES_IV_SIZE / 4] = {
-		ctx->state_record->save_iv[0],
+		ctx->iv_nonce,
 		cpu_to_le32p((u32 *) req->info),
 		cpu_to_le32p((u32 *) (req->info + 4)),
 		cpu_to_le32(1) };
@@ -227,7 +220,7 @@ int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
 {
 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
 	__le32 iv[AES_IV_SIZE / 4] = {
-		ctx->state_record->save_iv[0],
+		ctx->iv_nonce,
 		cpu_to_le32p((u32 *) req->info),
 		cpu_to_le32p((u32 *) (req->info + 4)),
 		cpu_to_le32(1) };
@@ -254,21 +247,13 @@ static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
 	ctx->dev = my_alg->dev;
 
 	/* Create SA */
-	if (ctx->sa_in_dma_addr || ctx->sa_out_dma_addr)
+	if (ctx->sa_in || ctx->sa_out)
 		crypto4xx_free_sa(ctx);
 
 	rc = crypto4xx_alloc_sa(ctx, sa_len);
 	if (rc)
 		return rc;
 
-	if (ctx->state_record_dma_addr == 0) {
-		crypto4xx_alloc_state_record(ctx);
-		if (!ctx->state_record_dma_addr) {
-			crypto4xx_free_sa(ctx);
-			return -ENOMEM;
-		}
-	}
-
 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
 				 sizeof(struct crypto4xx_ctx));
 	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
......
@@ -130,21 +130,17 @@ static void crypto4xx_hw_init(struct crypto4xx_device *dev)
 
 int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size)
 {
-	ctx->sa_in = dma_alloc_coherent(ctx->dev->core_dev->device, size * 4,
-					&ctx->sa_in_dma_addr, GFP_ATOMIC);
+	ctx->sa_in = kzalloc(size * 4, GFP_ATOMIC);
 	if (ctx->sa_in == NULL)
 		return -ENOMEM;
 
-	ctx->sa_out = dma_alloc_coherent(ctx->dev->core_dev->device, size * 4,
-					 &ctx->sa_out_dma_addr, GFP_ATOMIC);
+	ctx->sa_out = kzalloc(size * 4, GFP_ATOMIC);
 	if (ctx->sa_out == NULL) {
-		dma_free_coherent(ctx->dev->core_dev->device, size * 4,
-				  ctx->sa_in, ctx->sa_in_dma_addr);
+		kfree(ctx->sa_in);
+		ctx->sa_in = NULL;
 		return -ENOMEM;
 	}
 
-	memset(ctx->sa_in, 0, size * 4);
-	memset(ctx->sa_out, 0, size * 4);
 	ctx->sa_len = size;
 
 	return 0;
@@ -152,40 +148,13 @@ int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size)
 
 void crypto4xx_free_sa(struct crypto4xx_ctx *ctx)
 {
-	if (ctx->sa_in != NULL)
-		dma_free_coherent(ctx->dev->core_dev->device, ctx->sa_len * 4,
-				  ctx->sa_in, ctx->sa_in_dma_addr);
-	if (ctx->sa_out != NULL)
-		dma_free_coherent(ctx->dev->core_dev->device, ctx->sa_len * 4,
-				  ctx->sa_out, ctx->sa_out_dma_addr);
-	ctx->sa_in_dma_addr = 0;
-	ctx->sa_out_dma_addr = 0;
+	kfree(ctx->sa_in);
+	ctx->sa_in = NULL;
+	kfree(ctx->sa_out);
+	ctx->sa_out = NULL;
 	ctx->sa_len = 0;
 }
 
-u32 crypto4xx_alloc_state_record(struct crypto4xx_ctx *ctx)
-{
-	ctx->state_record = dma_alloc_coherent(ctx->dev->core_dev->device,
-				sizeof(struct sa_state_record),
-				&ctx->state_record_dma_addr, GFP_ATOMIC);
-	if (!ctx->state_record_dma_addr)
-		return -ENOMEM;
-
-	memset(ctx->state_record, 0, sizeof(struct sa_state_record));
-
-	return 0;
-}
-
-static void crypto4xx_free_state_record(struct crypto4xx_ctx *ctx)
-{
-	if (ctx->state_record != NULL)
-		dma_free_coherent(ctx->dev->core_dev->device,
-				  sizeof(struct sa_state_record),
-				  ctx->state_record,
-				  ctx->state_record_dma_addr);
-	ctx->state_record_dma_addr = 0;
-}
-
 /**
  * alloc memory for the gather ring
  * no need to alloc buf for the ring
@@ -883,8 +852,6 @@ static int crypto4xx_alg_init(struct crypto_tfm *tfm)
 	ctx->dev = amcc_alg->dev;
 	ctx->sa_in = NULL;
 	ctx->sa_out = NULL;
-	ctx->sa_in_dma_addr = 0;
-	ctx->sa_out_dma_addr = 0;
 	ctx->sa_len = 0;
 
 	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
@@ -905,7 +872,6 @@ static void crypto4xx_alg_exit(struct crypto_tfm *tfm)
 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	crypto4xx_free_sa(ctx);
-	crypto4xx_free_state_record(ctx);
 }
 
 int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
......
@@ -122,11 +122,8 @@ struct crypto4xx_core_device {
 
 struct crypto4xx_ctx {
 	struct crypto4xx_device *dev;
 	struct dynamic_sa_ctl *sa_in;
-	dma_addr_t sa_in_dma_addr;
 	struct dynamic_sa_ctl *sa_out;
-	dma_addr_t sa_out_dma_addr;
-	struct sa_state_record *state_record;
-	dma_addr_t state_record_dma_addr;
+	__le32 iv_nonce;
 	u32 sa_len;
 };
@@ -159,7 +156,6 @@ static inline struct crypto4xx_alg *crypto_alg_to_crypto4xx_alg(
 int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size);
 void crypto4xx_free_sa(struct crypto4xx_ctx *ctx);
 void crypto4xx_free_ctx(struct crypto4xx_ctx *ctx);
-u32 crypto4xx_alloc_state_record(struct crypto4xx_ctx *ctx);
 int crypto4xx_build_pd(struct crypto_async_request *req,
 			struct crypto4xx_ctx *ctx,
 			struct scatterlist *src,
......