Commit 1c502e2e authored by Ayush Sawal, committed by Herbert Xu

crypto: chelsio - Fix libkcapi's cbc(aes) AIO test case failures

The failing libkcapi "cbc(aes)" tests are:
- symmetric asynchronous cipher one shot multiple test
- symmetric asynchronous cipher stream multiple test
- symmetric asynchronous cipher vmsplice multiple test

This patch adds a wait_for_completion() call in chcr_aes_encrypt(), which
completes when the response comes back from the hardware. This serializes
encryption in the cbc(aes) AIO case.
Signed-off-by: Ayush Sawal <ayush.sawal@chelsio.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 4fb3d8ba
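
The fix relies on the standard Linux completion API: the submit path blocks on a
struct completion that the response handler signals once the hardware replies.
The sketch below is a minimal, self-contained illustration of that pattern only,
not the driver code; all names (demo_ctx, demo_submit, demo_resp_handler) are
hypothetical, and a workqueue item stands in for the hardware response path.

/* Minimal sketch of the completion-based serialization pattern (hypothetical names). */
#include <linux/module.h>
#include <linux/completion.h>
#include <linux/workqueue.h>

struct demo_ctx {
	struct completion done;		/* signalled when the "response" arrives */
	struct work_struct resp_work;	/* stands in for the HW response path */
};

static struct demo_ctx demo;

/* Response side: analogous to chcr_handle_cipher_resp() signalling completion. */
static void demo_resp_handler(struct work_struct *work)
{
	struct demo_ctx *ctx = container_of(work, struct demo_ctx, resp_work);

	complete(&ctx->done);			/* wake the waiting submitter */
}

/* Submit side: analogous to chcr_aes_encrypt() waiting for the response. */
static int demo_submit(struct demo_ctx *ctx)
{
	schedule_work(&ctx->resp_work);		/* "send" the request */
	wait_for_completion(&ctx->done);	/* serialize: block until the response */
	return 0;
}

static int __init demo_init(void)
{
	init_completion(&demo.done);		/* as done in chcr_init_tfm() */
	INIT_WORK(&demo.resp_work, demo_resp_handler);
	return demo_submit(&demo);
}

static void __exit demo_exit(void)
{
	flush_work(&demo.resp_work);
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");

Blocking the submitter this way trades AIO parallelism for correctness: CBC chains
each block into the next request's IV, so back-to-back requests must not be in
flight simultaneously.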
@@ -1102,6 +1102,7 @@ static int chcr_handle_cipher_resp(struct skcipher_request *req,
 				   unsigned char *input, int err)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct chcr_context *ctx = c_ctx(tfm);
 	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
 	struct sk_buff *skb;
@@ -1166,10 +1167,20 @@ static int chcr_handle_cipher_resp(struct skcipher_request *req,
 	chcr_send_wr(skb);
 	reqctx->last_req_len = bytes;
 	reqctx->processed += bytes;
+	if (get_cryptoalg_subtype(tfm) ==
+	    CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
+	    CRYPTO_TFM_REQ_MAY_SLEEP ) {
+		complete(&ctx->cbc_aes_aio_done);
+	}
 	return 0;
 unmap:
 	chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
 complete:
+	if (get_cryptoalg_subtype(tfm) ==
+	    CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
+	    CRYPTO_TFM_REQ_MAY_SLEEP ) {
+		complete(&ctx->cbc_aes_aio_done);
+	}
 	chcr_dec_wrcount(dev);
 	req->base.complete(&req->base, err);
 	return err;
@@ -1289,6 +1300,7 @@ static int process_cipher(struct skcipher_request *req,
 static int chcr_aes_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct chcr_context *ctx;
 	struct chcr_dev *dev = c_ctx(tfm)->dev;
 	struct sk_buff *skb = NULL;
 	int err, isfull = 0;
@@ -1313,6 +1325,12 @@ static int chcr_aes_encrypt(struct skcipher_request *req)
 	skb->dev = u_ctx->lldi.ports[0];
 	set_wr_txq(skb, CPL_PRIORITY_DATA, c_ctx(tfm)->tx_qidx);
 	chcr_send_wr(skb);
+	if (get_cryptoalg_subtype(tfm) ==
+	    CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
+	    CRYPTO_TFM_REQ_MAY_SLEEP ) {
+		ctx=c_ctx(tfm);
+		wait_for_completion(&ctx->cbc_aes_aio_done);
+	}
 	return isfull ? -EBUSY : -EINPROGRESS;
 error:
 	chcr_dec_wrcount(dev);
@@ -1401,7 +1419,7 @@ static int chcr_init_tfm(struct crypto_skcipher *tfm)
 		pr_err("failed to allocate fallback for %s\n", alg->base.cra_name);
 		return PTR_ERR(ablkctx->sw_cipher);
 	}
+	init_completion(&ctx->cbc_aes_aio_done);
 	crypto_skcipher_set_reqsize(tfm, sizeof(struct chcr_skcipher_req_ctx));
 	return chcr_device_init(ctx);
...
@@ -254,6 +254,7 @@ struct chcr_context {
 	unsigned char rx_qidx;
 	unsigned char tx_chan_id;
 	unsigned char pci_chan_id;
+	struct completion cbc_aes_aio_done;
 	struct __crypto_ctx crypto_ctx[0];
 };
...