Commit a0aae821 authored by Christian Lamparter, committed by Herbert Xu

crypto: crypto4xx - prepare for AEAD support

This patch extends the existing interfaces and
functions so that AEAD ciphers can be added in
the next patches.

Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 2f77690d
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -315,6 +315,10 @@ config CRYPTO_DEV_PPC4XX
 	tristate "Driver AMCC PPC4xx crypto accelerator"
 	depends on PPC && 4xx
 	select CRYPTO_HASH
+	select CRYPTO_AEAD
+	select CRYPTO_AES
+	select CRYPTO_CCM
+	select CRYPTO_GCM
 	select CRYPTO_BLKCIPHER
 	help
 	  This option allows you to have support for AMCC crypto acceleration.
--- a/drivers/crypto/amcc/crypto4xx_alg.c
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
@@ -26,6 +26,7 @@
 #include <crypto/internal/hash.h>
 #include <linux/dma-mapping.h>
 #include <crypto/algapi.h>
+#include <crypto/aead.h>
 #include <crypto/aes.h>
 #include <crypto/sha.h>
 #include <crypto/ctr.h>
@@ -83,7 +84,7 @@ int crypto4xx_encrypt(struct ablkcipher_request *req)
 	crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
 
 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
-		req->nbytes, iv, ivlen, ctx->sa_out, ctx->sa_len);
+		req->nbytes, iv, ivlen, ctx->sa_out, ctx->sa_len, 0);
 }
 
 int crypto4xx_decrypt(struct ablkcipher_request *req)
@@ -97,7 +98,7 @@ int crypto4xx_decrypt(struct ablkcipher_request *req)
 	crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
 
 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
-		req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len);
+		req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len, 0);
 }
 
 /**
@@ -213,7 +214,7 @@ int crypto4xx_rfc3686_encrypt(struct ablkcipher_request *req)
 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
 				  req->nbytes, iv, AES_IV_SIZE,
-				  ctx->sa_out, ctx->sa_len);
+				  ctx->sa_out, ctx->sa_len, 0);
 }
 
 int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
@@ -227,7 +228,7 @@ int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
 				  req->nbytes, iv, AES_IV_SIZE,
-				  ctx->sa_out, ctx->sa_len);
+				  ctx->sa_out, ctx->sa_len, 0);
 }
 
 /**
@@ -239,11 +240,13 @@ static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
 				   unsigned char hm)
 {
 	struct crypto_alg *alg = tfm->__crt_alg;
-	struct crypto4xx_alg *my_alg = crypto_alg_to_crypto4xx_alg(alg);
+	struct crypto4xx_alg *my_alg;
 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct dynamic_sa_hash160 *sa;
 	int rc;
 
+	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
+			      alg.u.hash);
 	ctx->dev = my_alg->dev;
 
 	/* Create SA */
@@ -300,7 +303,7 @@ int crypto4xx_hash_update(struct ahash_request *req)
 	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
 				  req->nbytes, NULL, 0, ctx->sa_in,
-				  ctx->sa_len);
+				  ctx->sa_len, 0);
 }
 
 int crypto4xx_hash_final(struct ahash_request *req)
@@ -319,7 +322,7 @@ int crypto4xx_hash_digest(struct ahash_request *req)
 	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
 				  req->nbytes, NULL, 0, ctx->sa_in,
-				  ctx->sa_len);
+				  ctx->sa_len, 0);
 }
 
 /**
@@ -330,5 +333,3 @@ int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
 	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
 				       SA_HASH_MODE_HASH);
 }
--- a/drivers/crypto/amcc/crypto4xx_core.c
+++ b/drivers/crypto/amcc/crypto4xx_core.c
@@ -35,10 +35,12 @@
 #include <asm/dcr.h>
 #include <asm/dcr-regs.h>
 #include <asm/cacheflush.h>
+#include <crypto/aead.h>
 #include <crypto/aes.h>
 #include <crypto/ctr.h>
 #include <crypto/sha.h>
 #include <crypto/scatterwalk.h>
+#include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
 #include "crypto4xx_reg_def.h"
 #include "crypto4xx_core.h"
@@ -518,7 +520,7 @@ static void crypto4xx_ret_sg_desc(struct crypto4xx_device *dev,
 	}
 }
 
-static u32 crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
+static void crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
 				     struct pd_uinfo *pd_uinfo,
 				     struct ce_pd *pd)
 {
@@ -543,11 +545,9 @@ static u32 crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
 	if (pd_uinfo->state & PD_ENTRY_BUSY)
 		ablkcipher_request_complete(ablk_req, -EINPROGRESS);
 	ablkcipher_request_complete(ablk_req, 0);
-
-	return 0;
 }
 
-static u32 crypto4xx_ahash_done(struct crypto4xx_device *dev,
+static void crypto4xx_ahash_done(struct crypto4xx_device *dev,
 				struct pd_uinfo *pd_uinfo)
 {
 	struct crypto4xx_ctx *ctx;
@@ -563,20 +563,88 @@ static u32 crypto4xx_ahash_done(struct crypto4xx_device *dev,
 	if (pd_uinfo->state & PD_ENTRY_BUSY)
 		ahash_request_complete(ahash_req, -EINPROGRESS);
 	ahash_request_complete(ahash_req, 0);
+}
 
-	return 0;
+static void crypto4xx_aead_done(struct crypto4xx_device *dev,
+				struct pd_uinfo *pd_uinfo,
+				struct ce_pd *pd)
+{
+	struct aead_request *aead_req;
+	struct crypto4xx_ctx *ctx;
+	struct scatterlist *dst = pd_uinfo->dest_va;
+	int err = 0;
+
+	aead_req = container_of(pd_uinfo->async_req, struct aead_request,
+				base);
+	ctx = crypto_tfm_ctx(aead_req->base.tfm);
+
+	if (pd_uinfo->using_sd) {
+		crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo,
+					  pd->pd_ctl_len.bf.pkt_len,
+					  dst);
+	} else {
+		__dma_sync_page(sg_page(dst), dst->offset, dst->length,
+				DMA_FROM_DEVICE);
+	}
+
+	if (pd_uinfo->sa_va->sa_command_0.bf.dir == DIR_OUTBOUND) {
+		/* append icv at the end */
+		size_t cp_len = crypto_aead_authsize(
+			crypto_aead_reqtfm(aead_req));
+		u32 icv[cp_len];
+
+		crypto4xx_memcpy_from_le32(icv, pd_uinfo->sr_va->save_digest,
+					   cp_len);
+
+		scatterwalk_map_and_copy(icv, dst, aead_req->cryptlen,
+					 cp_len, 1);
+	}
+
+	crypto4xx_ret_sg_desc(dev, pd_uinfo);
+
+	if (pd->pd_ctl.bf.status & 0xff) {
+		if (pd->pd_ctl.bf.status & 0x1) {
+			/* authentication error */
+			err = -EBADMSG;
+		} else {
+			if (!__ratelimit(&dev->aead_ratelimit)) {
+				if (pd->pd_ctl.bf.status & 2)
+					pr_err("pad fail error\n");
+				if (pd->pd_ctl.bf.status & 4)
+					pr_err("seqnum fail\n");
+				if (pd->pd_ctl.bf.status & 8)
+					pr_err("error _notify\n");
+				pr_err("aead return err status = 0x%02x\n",
+					pd->pd_ctl.bf.status & 0xff);
+				pr_err("pd pad_ctl = 0x%08x\n",
+					pd->pd_ctl.bf.pd_pad_ctl);
+			}
+			err = -EINVAL;
+		}
+	}
+
+	if (pd_uinfo->state & PD_ENTRY_BUSY)
+		aead_request_complete(aead_req, -EINPROGRESS);
+
+	aead_request_complete(aead_req, err);
 }
 
-static u32 crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx)
+static void crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx)
 {
 	struct ce_pd *pd = &dev->pdr[idx];
 	struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
 
-	if (crypto_tfm_alg_type(pd_uinfo->async_req->tfm) ==
-			CRYPTO_ALG_TYPE_ABLKCIPHER)
-		return crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
-	else
-		return crypto4xx_ahash_done(dev, pd_uinfo);
+	switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
+	case CRYPTO_ALG_TYPE_ABLKCIPHER:
+		crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
+		break;
+	case CRYPTO_ALG_TYPE_AEAD:
+		crypto4xx_aead_done(dev, pd_uinfo, pd);
+		break;
+	case CRYPTO_ALG_TYPE_AHASH:
+		crypto4xx_ahash_done(dev, pd_uinfo);
+		break;
+	}
 }
 
 static void crypto4xx_stop_all(struct crypto4xx_core_device *core_dev)
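For reference, the outbound path in crypto4xx_aead_done() above publishes the tag with scatterwalk_map_and_copy(), whose last argument selects the copy direction. A minimal sketch of just that step, with a hypothetical helper name (this function is not part of the driver):

    #include <crypto/scatterwalk.h>

    /*
     * scatterwalk_map_and_copy(buf, sg, start, nbytes, out) moves nbytes
     * at byte offset start of the scatterlist; out == 1 copies buf -> sg,
     * out == 0 copies sg -> buf.
     */
    static void icv_append_sketch(struct aead_request *req,
                                  struct scatterlist *dst,
                                  void *icv, unsigned int authsize)
    {
            /* place the ICV directly behind the ciphertext */
            scatterwalk_map_and_copy(icv, dst, req->cryptlen, authsize, 1);
    }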
@@ -612,8 +680,10 @@ int crypto4xx_build_pd(struct crypto_async_request *req,
 		       const unsigned int datalen,
 		       const __le32 *iv, const u32 iv_len,
 		       const struct dynamic_sa_ctl *req_sa,
-		       const unsigned int sa_len)
+		       const unsigned int sa_len,
+		       const unsigned int assoclen)
 {
+	struct scatterlist _dst[2];
 	struct crypto4xx_device *dev = ctx->dev;
 	struct dynamic_sa_ctl *sa;
 	struct ce_gd *gd;
@@ -627,18 +697,25 @@ int crypto4xx_build_pd(struct crypto_async_request *req,
 	unsigned int nbytes = datalen;
 	size_t offset_to_sr_ptr;
 	u32 gd_idx = 0;
+	int tmp;
 	bool is_busy;
 
-	/* figure how many gd is needed */
-	num_gd = sg_nents_for_len(src, datalen);
-	if ((int)num_gd < 0) {
+	/* figure how many gd are needed */
+	tmp = sg_nents_for_len(src, assoclen + datalen);
+	if (tmp < 0) {
 		dev_err(dev->core_dev->device, "Invalid number of src SG.\n");
-		return -EINVAL;
+		return tmp;
 	}
-	if (num_gd == 1)
-		num_gd = 0;
+	if (tmp == 1)
+		tmp = 0;
+	num_gd = tmp;
+
+	if (assoclen) {
+		nbytes += assoclen;
+		dst = scatterwalk_ffwd(_dst, dst, assoclen);
+	}
 
-	/* figure how many sd is needed */
+	/* figure how many sd are needed */
 	if (sg_is_last(dst)) {
 		num_sd = 0;
 	} else {
@@ -724,6 +801,7 @@ int crypto4xx_build_pd(struct crypto_async_request *req,
 
 	sa = pd_uinfo->sa_va;
 	memcpy(sa, req_sa, sa_len * 4);
+	sa->sa_command_1.bf.hash_crypto_offset = (assoclen >> 2);
 	offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(sa);
 	*(u32 *)((unsigned long)sa + offset_to_sr_ptr) = pd_uinfo->sr_pa;
@@ -830,7 +908,7 @@ int crypto4xx_build_pd(struct crypto_async_request *req,
 	       ((crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AHASH) |
 		(crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AEAD) ?
 			PD_CTL_HASH_FINAL : 0);
-	pd->pd_ctl_len.w = 0x00400000 | datalen;
+	pd->pd_ctl_len.w = 0x00400000 | (assoclen + datalen);
 	pd_uinfo->state = PD_ENTRY_INUSE | (is_busy ? PD_ENTRY_BUSY : 0);
 
 	wmb();
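With the extra assoclen parameter, an AEAD path can hand a complete request to the engine in one crypto4xx_build_pd() call: the associated data is counted into the packet length and authenticated, while dst is fast-forwarded past it via scatterwalk_ffwd() and hash_crypto_offset keeps it out of the cipher stream. A minimal caller sketch in the spirit of the follow-up patches; the function name is hypothetical and the SA is assumed to be set up by setkey:

    /* Hypothetical AEAD encrypt path, not part of this patch. */
    static int crypto4xx_aead_encrypt_sketch(struct aead_request *req)
    {
            struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
            __le32 iv[AES_IV_SIZE / 4];

            crypto4xx_memcpy_to_le32(iv, req->iv, AES_IV_SIZE);

            /* src/dst still carry req->assoclen bytes of associated data */
            return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                      req->cryptlen, iv, AES_IV_SIZE,
                                      ctx->sa_out, ctx->sa_len,
                                      req->assoclen);
    }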
@@ -843,38 +921,66 @@ int crypto4xx_build_pd(struct crypto_async_request *req,
 /**
  * Algorithm Registration Functions
  */
-static int crypto4xx_alg_init(struct crypto_tfm *tfm)
+static void crypto4xx_ctx_init(struct crypto4xx_alg *amcc_alg,
+			       struct crypto4xx_ctx *ctx)
 {
-	struct crypto_alg *alg = tfm->__crt_alg;
-	struct crypto4xx_alg *amcc_alg = crypto_alg_to_crypto4xx_alg(alg);
-	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
-
 	ctx->dev = amcc_alg->dev;
 	ctx->sa_in = NULL;
 	ctx->sa_out = NULL;
 	ctx->sa_len = 0;
+}
 
-	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
-	default:
-		tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
-		break;
-	case CRYPTO_ALG_TYPE_AHASH:
-		crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
-					 sizeof(struct crypto4xx_ctx));
-		break;
-	}
+static int crypto4xx_ablk_init(struct crypto_tfm *tfm)
+{
+	struct crypto_alg *alg = tfm->__crt_alg;
+	struct crypto4xx_alg *amcc_alg;
+	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
 
+	amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
+	crypto4xx_ctx_init(amcc_alg, ctx);
+	tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
 	return 0;
 }
 
-static void crypto4xx_alg_exit(struct crypto_tfm *tfm)
+static void crypto4xx_common_exit(struct crypto4xx_ctx *ctx)
 {
-	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
-
 	crypto4xx_free_sa(ctx);
 }
 
-int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
-			   struct crypto4xx_alg_common *crypto_alg,
-			   int array_size)
+static void crypto4xx_ablk_exit(struct crypto_tfm *tfm)
+{
+	crypto4xx_common_exit(crypto_tfm_ctx(tfm));
+}
+
+static int crypto4xx_aead_init(struct crypto_aead *tfm)
+{
+	struct aead_alg *alg = crypto_aead_alg(tfm);
+	struct crypto4xx_ctx *ctx = crypto_aead_ctx(tfm);
+	struct crypto4xx_alg *amcc_alg;
+
+	ctx->sw_cipher.aead = crypto_alloc_aead(alg->base.cra_name, 0,
+						CRYPTO_ALG_NEED_FALLBACK |
+						CRYPTO_ALG_ASYNC);
+	if (IS_ERR(ctx->sw_cipher.aead))
+		return PTR_ERR(ctx->sw_cipher.aead);
+
+	amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.aead);
+	crypto4xx_ctx_init(amcc_alg, ctx);
+	crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
+				max(sizeof(struct crypto4xx_ctx), 32 +
+				crypto_aead_reqsize(ctx->sw_cipher.aead)));
+	return 0;
+}
+
+static void crypto4xx_aead_exit(struct crypto_aead *tfm)
+{
+	struct crypto4xx_ctx *ctx = crypto_aead_ctx(tfm);
+
+	crypto4xx_common_exit(ctx);
+	crypto_free_aead(ctx->sw_cipher.aead);
+}
+
+static int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
+				  struct crypto4xx_alg_common *crypto_alg,
+				  int array_size)
 {
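The software AEAD allocated in crypto4xx_aead_init() gives the driver somewhere to bounce requests the engine cannot handle, and the crypto_aead_set_reqsize() call above reserves room behind each request for exactly such a sub-request. A minimal sketch of a handoff, with a hypothetical helper (the real one arrives in the follow-up patches):

    /* Hypothetical fallback handoff, not part of this patch. */
    static int crypto4xx_aead_fallback_sketch(struct aead_request *req,
                                              struct crypto4xx_ctx *ctx,
                                              bool do_decrypt)
    {
            struct aead_request *subreq = aead_request_ctx(req);

            aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
            aead_request_set_callback(subreq, req->base.flags,
                                      req->base.complete, req->base.data);
            aead_request_set_crypt(subreq, req->src, req->dst,
                                   req->cryptlen, req->iv);
            aead_request_set_ad(subreq, req->assoclen);

            return do_decrypt ? crypto_aead_decrypt(subreq) :
                                crypto_aead_encrypt(subreq);
    }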
@@ -891,6 +997,10 @@ int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
 		alg->dev = sec_dev;
 
 		switch (alg->alg.type) {
+		case CRYPTO_ALG_TYPE_AEAD:
+			rc = crypto_register_aead(&alg->alg.u.aead);
+			break;
+
 		case CRYPTO_ALG_TYPE_AHASH:
 			rc = crypto_register_ahash(&alg->alg.u.hash);
 			break;
@@ -920,6 +1030,10 @@ static void crypto4xx_unregister_alg(struct crypto4xx_device *sec_dev)
 			crypto_unregister_ahash(&alg->alg.u.hash);
 			break;
 
+		case CRYPTO_ALG_TYPE_AEAD:
+			crypto_unregister_aead(&alg->alg.u.aead);
+			break;
+
 		default:
 			crypto_unregister_alg(&alg->alg.u.cipher);
 		}
@@ -973,7 +1087,7 @@ static irqreturn_t crypto4xx_ce_interrupt_handler(int irq, void *data)
 /**
  * Supported Crypto Algorithms
  */
-struct crypto4xx_alg_common crypto4xx_alg[] = {
+static struct crypto4xx_alg_common crypto4xx_alg[] = {
 	/* Crypto AES modes */
 	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
 		.cra_name	= "cbc(aes)",
@@ -985,8 +1099,8 @@ struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.cra_blocksize	= AES_BLOCK_SIZE,
 		.cra_ctxsize	= sizeof(struct crypto4xx_ctx),
 		.cra_type	= &crypto_ablkcipher_type,
-		.cra_init	= crypto4xx_alg_init,
-		.cra_exit	= crypto4xx_alg_exit,
+		.cra_init	= crypto4xx_ablk_init,
+		.cra_exit	= crypto4xx_ablk_exit,
 		.cra_module	= THIS_MODULE,
 		.cra_u		= {
 			.ablkcipher = {
@@ -1009,8 +1123,8 @@ struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.cra_blocksize	= AES_BLOCK_SIZE,
 		.cra_ctxsize	= sizeof(struct crypto4xx_ctx),
 		.cra_type	= &crypto_ablkcipher_type,
-		.cra_init	= crypto4xx_alg_init,
-		.cra_exit	= crypto4xx_alg_exit,
+		.cra_init	= crypto4xx_ablk_init,
+		.cra_exit	= crypto4xx_ablk_exit,
 		.cra_module	= THIS_MODULE,
 		.cra_u		= {
 			.ablkcipher = {
@@ -1033,8 +1147,8 @@ struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.cra_blocksize	= AES_BLOCK_SIZE,
 		.cra_ctxsize	= sizeof(struct crypto4xx_ctx),
 		.cra_type	= &crypto_ablkcipher_type,
-		.cra_init	= crypto4xx_alg_init,
-		.cra_exit	= crypto4xx_alg_exit,
+		.cra_init	= crypto4xx_ablk_init,
+		.cra_exit	= crypto4xx_ablk_exit,
 		.cra_module	= THIS_MODULE,
 		.cra_u		= {
 			.ablkcipher = {
@@ -1059,8 +1173,8 @@ struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.cra_blocksize	= AES_BLOCK_SIZE,
 		.cra_ctxsize	= sizeof(struct crypto4xx_ctx),
 		.cra_type	= &crypto_ablkcipher_type,
-		.cra_init	= crypto4xx_alg_init,
-		.cra_exit	= crypto4xx_alg_exit,
+		.cra_init	= crypto4xx_ablk_init,
+		.cra_exit	= crypto4xx_ablk_exit,
 		.cra_module	= THIS_MODULE,
 		.cra_u		= {
 			.ablkcipher = {
@@ -1082,8 +1196,8 @@ struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.cra_blocksize	= AES_BLOCK_SIZE,
 		.cra_ctxsize	= sizeof(struct crypto4xx_ctx),
 		.cra_type	= &crypto_ablkcipher_type,
-		.cra_init	= crypto4xx_alg_init,
-		.cra_exit	= crypto4xx_alg_exit,
+		.cra_init	= crypto4xx_ablk_init,
+		.cra_exit	= crypto4xx_ablk_exit,
 		.cra_module	= THIS_MODULE,
 		.cra_u		= {
 			.ablkcipher = {
@@ -1149,6 +1263,7 @@ static int crypto4xx_probe(struct platform_device *ofdev)
 	core_dev->device = dev;
 	spin_lock_init(&core_dev->lock);
 	INIT_LIST_HEAD(&core_dev->dev->alg_list);
+	ratelimit_default_init(&core_dev->dev->aead_ratelimit);
 	rc = crypto4xx_build_pdr(core_dev->dev);
 	if (rc)
 		goto err_build_pdr;
--- a/drivers/crypto/amcc/crypto4xx_core.h
+++ b/drivers/crypto/amcc/crypto4xx_core.h
@@ -22,7 +22,9 @@
 #ifndef __CRYPTO4XX_CORE_H__
 #define __CRYPTO4XX_CORE_H__
 
+#include <linux/ratelimit.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/aead.h>
 #include "crypto4xx_reg_def.h"
 #include "crypto4xx_sa.h"
 
@@ -106,6 +108,7 @@ struct crypto4xx_device {
 	struct pd_uinfo *pdr_uinfo;
 	struct list_head alg_list;	/* List of algorithm supported
 					by this device */
+	struct ratelimit_state aead_ratelimit;
 };
 
 struct crypto4xx_core_device {
@@ -125,6 +128,9 @@ struct crypto4xx_ctx {
 	struct dynamic_sa_ctl *sa_out;
 	__le32 iv_nonce;
 	u32 sa_len;
+	union {
+		struct crypto_aead *aead;
+	} sw_cipher;
 };
 
 struct crypto4xx_alg_common {
@@ -132,6 +138,7 @@ struct crypto4xx_alg_common {
 	union {
 		struct crypto_alg cipher;
 		struct ahash_alg hash;
+		struct aead_alg aead;
 	} u;
 };
 
@@ -141,18 +148,6 @@ struct crypto4xx_alg {
 	struct crypto4xx_device *dev;
 };
 
-static inline struct crypto4xx_alg *crypto_alg_to_crypto4xx_alg(
-	struct crypto_alg *x)
-{
-	switch (x->cra_flags & CRYPTO_ALG_TYPE_MASK) {
-	case CRYPTO_ALG_TYPE_AHASH:
-		return container_of(__crypto_ahash_alg(x),
-				    struct crypto4xx_alg, alg.u.hash);
-	}
-
-	return container_of(x, struct crypto4xx_alg, alg.u.cipher);
-}
-
 int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size);
 void crypto4xx_free_sa(struct crypto4xx_ctx *ctx);
 void crypto4xx_free_ctx(struct crypto4xx_ctx *ctx);
@@ -163,7 +158,8 @@ int crypto4xx_build_pd(struct crypto_async_request *req,
 		       const unsigned int datalen,
 		       const __le32 *iv, const u32 iv_len,
 		       const struct dynamic_sa_ctl *sa,
-		       const unsigned int sa_len);
+		       const unsigned int sa_len,
+		       const unsigned int assoclen);
 int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,
--- a/drivers/crypto/amcc/crypto4xx_sa.h
+++ b/drivers/crypto/amcc/crypto4xx_sa.h
@@ -55,6 +55,8 @@ union dynamic_sa_contents {
 #define SA_OP_GROUP_BASIC			0
 #define SA_OPCODE_ENCRYPT			0
 #define SA_OPCODE_DECRYPT			0
+#define SA_OPCODE_ENCRYPT_HASH			1
+#define SA_OPCODE_HASH_DECRYPT			1
 #define SA_OPCODE_HASH				3
 #define SA_CIPHER_ALG_DES			0
 #define SA_CIPHER_ALG_3DES			1
@@ -65,6 +67,8 @@ union dynamic_sa_contents {
 #define SA_HASH_ALG_MD5				0
 #define SA_HASH_ALG_SHA1			1
+#define SA_HASH_ALG_GHASH			12
+#define SA_HASH_ALG_CBC_MAC			14
 #define SA_HASH_ALG_NULL			15
 #define SA_HASH_ALG_SHA1_DIGEST_SIZE		20
@@ -233,6 +237,36 @@ struct dynamic_sa_aes256 {
 #define SA_AES256_CONTENTS	0x3e000082
 #define SA_AES_CONTENTS		0x3e000002
 
+/**
+ * Security Association (SA) for AES128 CCM
+ */
+struct dynamic_sa_aes128_ccm {
+	struct dynamic_sa_ctl	ctrl;
+	__le32			key[4];
+	__le32			iv[4];
+	u32			state_ptr;
+	u32			reserved;
+} __packed;
+#define SA_AES128_CCM_LEN	(sizeof(struct dynamic_sa_aes128_ccm)/4)
+#define SA_AES128_CCM_CONTENTS	0x3e000042
+#define SA_AES_CCM_CONTENTS	0x3e000002
+
+/**
+ * Security Association (SA) for AES128_GCM
+ */
+struct dynamic_sa_aes128_gcm {
+	struct dynamic_sa_ctl	ctrl;
+	__le32			key[4];
+	__le32			inner_digest[4];
+	__le32			iv[4];
+	u32			state_ptr;
+	u32			reserved;
+} __packed;
+#define SA_AES128_GCM_LEN	(sizeof(struct dynamic_sa_aes128_gcm)/4)
+#define SA_AES128_GCM_CONTENTS	0x3e000442
+#define SA_AES_GCM_CONTENTS	0x3e000402
+
 /**
  * Security Association (SA) for HASH160: HMAC-SHA1
  */
@@ -274,4 +308,11 @@ static inline __le32 *get_dynamic_sa_key_field(struct dynamic_sa_ctl *cts)
 	return (__le32 *) ((unsigned long)cts + sizeof(struct dynamic_sa_ctl));
 }
 
+static inline __le32 *get_dynamic_sa_inner_digest(struct dynamic_sa_ctl *cts)
+{
+	return (__le32 *) ((unsigned long)cts +
+		sizeof(struct dynamic_sa_ctl) +
+		cts->sa_contents.bf.key_size * 4);
+}
+
 #endif
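get_dynamic_sa_inner_digest() locates the digest from the SA's advertised key size rather than a fixed structure offset, so it works for any SA layout that stores the inner digest right behind the key. An illustrative compile-time check against the AES128 GCM SA above (not driver code; assumes key_size reads 4 words for the 128-bit key):

    #include <linux/stddef.h>

    /* the digest must sit sizeof(ctrl) + 16 key bytes into the SA */
    _Static_assert(offsetof(struct dynamic_sa_aes128_gcm, inner_digest) ==
                   sizeof(struct dynamic_sa_ctl) + 4 * 4,
                   "inner digest must directly follow the key");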