Commit 871df319 authored by Antoine Ténart, committed by Herbert Xu

crypto: inside-secure - EIP97 support

The Inside Secure SafeXcel driver was initially designed to support the
EIP197 cryptographic engine, which is an evolution (with more features
and better performance) of the EIP97 cryptographic engine. This patch
converts the Inside Secure SafeXcel driver to support both engines
(EIP97 + EIP197).

The main differences are the register offsets and the context
invalidation process, which is EIP197 specific. This patch adds an
indirection on the register offsets and adds checks so that no
invalidation request is sent when driving the EIP97. A new compatible
string is added as well so the driver can be bound from device trees.
Signed-off-by: Antoine Tenart <antoine.tenart@free-electrons.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 8732b298
This diff is collapsed.
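Note: the collapsed diff above covers the bulk of the rework (most likely the main probe/setup code, which is not shown on this page). As a hedged sketch only, based on the enum, struct and *_BASE defines added to the header hunks below, the offset indirection plausibly gets wired up at probe time along the following lines. The helper names, the probe snippet and the "inside-secure,safexcel-eip97" compatible string are assumptions here, not taken from the visible hunks.

/* Hedged sketch, not the patch itself: fill priv->version and priv->offsets
 * from the matched compatible before any MMIO access is made.
 */
#include <linux/of_device.h>

#include "safexcel.h"	/* enum safexcel_eip_version, offsets, *_BASE defines */

static void safexcel_init_register_offsets(struct safexcel_crypto_priv *priv)
{
	struct safexcel_register_offsets *offsets = &priv->offsets;

	if (priv->version == EIP197) {
		offsets->hia_aic     = EIP197_HIA_AIC_BASE;
		offsets->hia_aic_g   = EIP197_HIA_AIC_G_BASE;
		offsets->hia_aic_r   = EIP197_HIA_AIC_R_BASE;
		offsets->hia_aic_xdr = EIP197_HIA_AIC_xDR_BASE;
		offsets->hia_dfe     = EIP197_HIA_DFE_BASE;
		offsets->hia_dfe_thr = EIP197_HIA_DFE_THR_BASE;
		offsets->hia_dse     = EIP197_HIA_DSE_BASE;
		offsets->hia_dse_thr = EIP197_HIA_DSE_THR_BASE;
		offsets->hia_gen_cfg = EIP197_HIA_GEN_CFG_BASE;
		offsets->pe          = EIP197_PE_BASE;
	} else {
		offsets->hia_aic     = EIP97_HIA_AIC_BASE;
		offsets->hia_aic_g   = EIP97_HIA_AIC_G_BASE;
		offsets->hia_aic_r   = EIP97_HIA_AIC_R_BASE;
		offsets->hia_aic_xdr = EIP97_HIA_AIC_xDR_BASE;
		offsets->hia_dfe     = EIP97_HIA_DFE_BASE;
		offsets->hia_dfe_thr = EIP97_HIA_DFE_THR_BASE;
		offsets->hia_dse     = EIP97_HIA_DSE_BASE;
		offsets->hia_dse_thr = EIP97_HIA_DSE_THR_BASE;
		offsets->hia_gen_cfg = EIP97_HIA_GEN_CFG_BASE;
		offsets->pe          = EIP97_PE_BASE;
	}
}

/* The new compatible mentioned in the commit message; the exact string is an
 * assumption based on the existing EIP197 binding naming.
 */
static const struct of_device_id safexcel_of_match_table[] = {
	{ .compatible = "inside-secure,safexcel-eip97",  .data = (void *)EIP97  },
	{ .compatible = "inside-secure,safexcel-eip197", .data = (void *)EIP197 },
	{},
};

/* In probe(), something along these lines would run first: */
static void safexcel_probe_sketch(struct safexcel_crypto_priv *priv,
				  struct device *dev)
{
	priv->version = (enum safexcel_eip_version)
			(unsigned long)of_device_get_match_data(dev);
	safexcel_init_register_offsets(priv);
}

Keeping the per-engine bases in priv->offsets lets every existing EIP197_* register macro stay usable unchanged on both engines, which is the point of the indirection below.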
@@ -28,55 +28,94 @@
 #define EIP197_GFP_FLAGS(base)  ((base).flags & CRYPTO_TFM_REQ_MAY_SLEEP ? \
                                  GFP_KERNEL : GFP_ATOMIC)
+/* Register base offsets */
+#define EIP197_HIA_AIC(priv)       ((priv)->base + (priv)->offsets.hia_aic)
+#define EIP197_HIA_AIC_G(priv)     ((priv)->base + (priv)->offsets.hia_aic_g)
+#define EIP197_HIA_AIC_R(priv)     ((priv)->base + (priv)->offsets.hia_aic_r)
+#define EIP197_HIA_AIC_xDR(priv)   ((priv)->base + (priv)->offsets.hia_aic_xdr)
+#define EIP197_HIA_DFE(priv)       ((priv)->base + (priv)->offsets.hia_dfe)
+#define EIP197_HIA_DFE_THR(priv)   ((priv)->base + (priv)->offsets.hia_dfe_thr)
+#define EIP197_HIA_DSE(priv)       ((priv)->base + (priv)->offsets.hia_dse)
+#define EIP197_HIA_DSE_THR(priv)   ((priv)->base + (priv)->offsets.hia_dse_thr)
+#define EIP197_HIA_GEN_CFG(priv)   ((priv)->base + (priv)->offsets.hia_gen_cfg)
+#define EIP197_PE(priv)            ((priv)->base + (priv)->offsets.pe)
+/* EIP197 base offsets */
+#define EIP197_HIA_AIC_BASE        0x90000
+#define EIP197_HIA_AIC_G_BASE      0x90000
+#define EIP197_HIA_AIC_R_BASE      0x90800
+#define EIP197_HIA_AIC_xDR_BASE    0x80000
+#define EIP197_HIA_DFE_BASE        0x8c000
+#define EIP197_HIA_DFE_THR_BASE    0x8c040
+#define EIP197_HIA_DSE_BASE        0x8d000
+#define EIP197_HIA_DSE_THR_BASE    0x8d040
+#define EIP197_HIA_GEN_CFG_BASE    0xf0000
+#define EIP197_PE_BASE             0xa0000
+/* EIP97 base offsets */
+#define EIP97_HIA_AIC_BASE         0x0
+#define EIP97_HIA_AIC_G_BASE       0x0
+#define EIP97_HIA_AIC_R_BASE       0x0
+#define EIP97_HIA_AIC_xDR_BASE     0x0
+#define EIP97_HIA_DFE_BASE         0xf000
+#define EIP97_HIA_DFE_THR_BASE     0xf200
+#define EIP97_HIA_DSE_BASE         0xf400
+#define EIP97_HIA_DSE_THR_BASE     0xf600
+#define EIP97_HIA_GEN_CFG_BASE     0x10000
+#define EIP97_PE_BASE              0x10000
 /* CDR/RDR register offsets */
-#define EIP197_HIA_xDR_OFF(r)              (0x80000 + (r) * 0x1000)
-#define EIP197_HIA_CDR(r)                  (EIP197_HIA_xDR_OFF(r))
-#define EIP197_HIA_RDR(r)                  (EIP197_HIA_xDR_OFF(r) + 0x800)
-#define EIP197_HIA_xDR_RING_BASE_ADDR_LO   0x0
-#define EIP197_HIA_xDR_RING_BASE_ADDR_HI   0x4
-#define EIP197_HIA_xDR_RING_SIZE           0x18
-#define EIP197_HIA_xDR_DESC_SIZE           0x1c
-#define EIP197_HIA_xDR_CFG                 0x20
-#define EIP197_HIA_xDR_DMA_CFG             0x24
-#define EIP197_HIA_xDR_THRESH              0x28
-#define EIP197_HIA_xDR_PREP_COUNT          0x2c
-#define EIP197_HIA_xDR_PROC_COUNT          0x30
-#define EIP197_HIA_xDR_PREP_PNTR           0x34
-#define EIP197_HIA_xDR_PROC_PNTR           0x38
-#define EIP197_HIA_xDR_STAT                0x3c
+#define EIP197_HIA_xDR_OFF(priv, r)        (EIP197_HIA_AIC_xDR(priv) + (r) * 0x1000)
+#define EIP197_HIA_CDR(priv, r)            (EIP197_HIA_xDR_OFF(priv, r))
+#define EIP197_HIA_RDR(priv, r)            (EIP197_HIA_xDR_OFF(priv, r) + 0x800)
+#define EIP197_HIA_xDR_RING_BASE_ADDR_LO   0x0000
+#define EIP197_HIA_xDR_RING_BASE_ADDR_HI   0x0004
+#define EIP197_HIA_xDR_RING_SIZE           0x0018
+#define EIP197_HIA_xDR_DESC_SIZE           0x001c
+#define EIP197_HIA_xDR_CFG                 0x0020
+#define EIP197_HIA_xDR_DMA_CFG             0x0024
+#define EIP197_HIA_xDR_THRESH              0x0028
+#define EIP197_HIA_xDR_PREP_COUNT          0x002c
+#define EIP197_HIA_xDR_PROC_COUNT          0x0030
+#define EIP197_HIA_xDR_PREP_PNTR           0x0034
+#define EIP197_HIA_xDR_PROC_PNTR           0x0038
+#define EIP197_HIA_xDR_STAT                0x003c
 /* register offsets */
-#define EIP197_HIA_DFE_CFG                 0x8c000
-#define EIP197_HIA_DFE_THR_CTRL            0x8c040
-#define EIP197_HIA_DFE_THR_STAT            0x8c044
-#define EIP197_HIA_DSE_CFG                 0x8d000
-#define EIP197_HIA_DSE_THR_CTRL            0x8d040
-#define EIP197_HIA_DSE_THR_STAT            0x8d044
-#define EIP197_HIA_RA_PE_CTRL              0x90010
-#define EIP197_HIA_RA_PE_STAT              0x90014
+#define EIP197_HIA_DFE_CFG                 0x0000
+#define EIP197_HIA_DFE_THR_CTRL            0x0000
+#define EIP197_HIA_DFE_THR_STAT            0x0004
+#define EIP197_HIA_DSE_CFG                 0x0000
+#define EIP197_HIA_DSE_THR_CTRL            0x0000
+#define EIP197_HIA_DSE_THR_STAT            0x0004
+#define EIP197_HIA_RA_PE_CTRL              0x0010
+#define EIP197_HIA_RA_PE_STAT              0x0014
 #define EIP197_HIA_AIC_R_OFF(r)            ((r) * 0x1000)
-#define EIP197_HIA_AIC_R_ENABLE_CTRL(r)    (0x9e808 - EIP197_HIA_AIC_R_OFF(r))
-#define EIP197_HIA_AIC_R_ENABLED_STAT(r)   (0x9e810 - EIP197_HIA_AIC_R_OFF(r))
-#define EIP197_HIA_AIC_R_ACK(r)            (0x9e810 - EIP197_HIA_AIC_R_OFF(r))
-#define EIP197_HIA_AIC_R_ENABLE_CLR(r)     (0x9e814 - EIP197_HIA_AIC_R_OFF(r))
-#define EIP197_HIA_AIC_G_ENABLE_CTRL       0x9f808
-#define EIP197_HIA_AIC_G_ENABLED_STAT      0x9f810
-#define EIP197_HIA_AIC_G_ACK               0x9f810
-#define EIP197_HIA_MST_CTRL                0x9fff4
-#define EIP197_HIA_OPTIONS                 0x9fff8
-#define EIP197_HIA_VERSION                 0x9fffc
-#define EIP197_PE_IN_DBUF_THRES            0xa0000
-#define EIP197_PE_IN_TBUF_THRES            0xa0100
-#define EIP197_PE_ICE_SCRATCH_RAM          0xa0800
-#define EIP197_PE_ICE_PUE_CTRL             0xa0c80
-#define EIP197_PE_ICE_SCRATCH_CTRL         0xa0d04
-#define EIP197_PE_ICE_FPP_CTRL             0xa0d80
-#define EIP197_PE_ICE_RAM_CTRL             0xa0ff0
-#define EIP197_PE_EIP96_FUNCTION_EN        0xa1004
-#define EIP197_PE_EIP96_CONTEXT_CTRL       0xa1008
-#define EIP197_PE_EIP96_CONTEXT_STAT       0xa100c
-#define EIP197_PE_OUT_DBUF_THRES           0xa1c00
-#define EIP197_PE_OUT_TBUF_THRES           0xa1d00
+#define EIP197_HIA_AIC_R_ENABLE_CTRL(r)    (0xe008 - EIP197_HIA_AIC_R_OFF(r))
+#define EIP197_HIA_AIC_R_ENABLED_STAT(r)   (0xe010 - EIP197_HIA_AIC_R_OFF(r))
+#define EIP197_HIA_AIC_R_ACK(r)            (0xe010 - EIP197_HIA_AIC_R_OFF(r))
+#define EIP197_HIA_AIC_R_ENABLE_CLR(r)     (0xe014 - EIP197_HIA_AIC_R_OFF(r))
+#define EIP197_HIA_AIC_G_ENABLE_CTRL       0xf808
+#define EIP197_HIA_AIC_G_ENABLED_STAT      0xf810
+#define EIP197_HIA_AIC_G_ACK               0xf810
+#define EIP197_HIA_MST_CTRL                0xfff4
+#define EIP197_HIA_OPTIONS                 0xfff8
+#define EIP197_HIA_VERSION                 0xfffc
+#define EIP197_PE_IN_DBUF_THRES            0x0000
+#define EIP197_PE_IN_TBUF_THRES            0x0100
+#define EIP197_PE_ICE_SCRATCH_RAM          0x0800
+#define EIP197_PE_ICE_PUE_CTRL             0x0c80
+#define EIP197_PE_ICE_SCRATCH_CTRL         0x0d04
+#define EIP197_PE_ICE_FPP_CTRL             0x0d80
+#define EIP197_PE_ICE_RAM_CTRL             0x0ff0
+#define EIP197_PE_EIP96_FUNCTION_EN        0x1004
+#define EIP197_PE_EIP96_CONTEXT_CTRL       0x1008
+#define EIP197_PE_EIP96_CONTEXT_STAT       0x100c
+#define EIP197_PE_OUT_DBUF_THRES           0x1c00
+#define EIP197_PE_OUT_TBUF_THRES           0x1d00
+#define EIP197_MST_CTRL                    0xfff4
+/* EIP197-specific registers, no indirection */
 #define EIP197_CLASSIFICATION_RAMS         0xe0000
 #define EIP197_TRC_CTRL                    0xf0800
 #define EIP197_TRC_LASTRES                 0xf0804
@@ -90,7 +129,6 @@
 #define EIP197_TRC_ECCDATASTAT             0xf083c
 #define EIP197_TRC_ECCDATA                 0xf0840
 #define EIP197_CS_RAM_CTRL                 0xf7ff0
-#define EIP197_MST_CTRL                    0xffff4
 /* EIP197_HIA_xDR_DESC_SIZE */
 #define EIP197_xDR_DESC_MODE_64BIT         BIT(31)
@@ -465,12 +503,33 @@ struct safexcel_work_data {
 	int ring;
 };
+enum safexcel_eip_version {
+	EIP97,
+	EIP197,
+};
+struct safexcel_register_offsets {
+	u32 hia_aic;
+	u32 hia_aic_g;
+	u32 hia_aic_r;
+	u32 hia_aic_xdr;
+	u32 hia_dfe;
+	u32 hia_dfe_thr;
+	u32 hia_dse;
+	u32 hia_dse_thr;
+	u32 hia_gen_cfg;
+	u32 pe;
+};
 struct safexcel_crypto_priv {
 	void __iomem *base;
 	struct device *dev;
 	struct clk *clk;
 	struct safexcel_config config;
+	enum safexcel_eip_version version;
+	struct safexcel_register_offsets offsets;
 	/* context DMA pool */
 	struct dma_pool *context_pool;
...
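With the block bases coming from priv->offsets and the per-register defines made relative, an MMIO access after this patch composes as base + block offset + register offset. A minimal illustration (the helper name is made up for this note, not part of the patch):

/* Illustration only: resolves to priv->base + offsets.hia_aic_g + 0xfff8,
 * i.e. 0x9fff8 on an EIP197 and 0xfff8 on an EIP97.
 */
static u32 safexcel_read_hia_options(struct safexcel_crypto_priv *priv)
{
	return readl(EIP197_HIA_AIC_G(priv) + EIP197_HIA_OPTIONS);
}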
@@ -69,6 +69,7 @@ static int safexcel_aes_setkey(struct crypto_skcipher *ctfm, const u8 *key,
 {
 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct safexcel_crypto_priv *priv = ctx->priv;
 	struct crypto_aes_ctx aes;
 	int ret, i;
@@ -78,7 +79,7 @@ static int safexcel_aes_setkey(struct crypto_skcipher *ctfm, const u8 *key,
 		return ret;
 	}
-	if (ctx->base.ctxr_dma) {
+	if (priv->version == EIP197 && ctx->base.ctxr_dma) {
 		for (i = 0; i < len / sizeof(u32); i++) {
 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
 				ctx->base.needs_inv = true;
@@ -411,9 +412,13 @@ static int safexcel_send(struct crypto_async_request *async,
 			  int *commands, int *results)
 {
 	struct skcipher_request *req = skcipher_request_cast(async);
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
+	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret;
+	BUG_ON(priv->version == EIP97 && sreq->needs_inv);
 	if (sreq->needs_inv)
 		ret = safexcel_cipher_send_inv(async, ring, request,
 					       commands, results);
@@ -476,7 +481,7 @@ static int safexcel_aes(struct skcipher_request *req,
 	ctx->mode = mode;
 	if (ctx->base.ctxr) {
-		if (ctx->base.needs_inv) {
+		if (priv->version == EIP197 && ctx->base.needs_inv) {
 			sreq->needs_inv = true;
 			ctx->base.needs_inv = false;
 		}
@@ -544,9 +549,14 @@ static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
 	memzero_explicit(ctx->base.ctxr->data, 8 * sizeof(u32));
-	ret = safexcel_cipher_exit_inv(tfm);
-	if (ret)
-		dev_warn(priv->dev, "cipher: invalidation error %d\n", ret);
+	if (priv->version == EIP197) {
+		ret = safexcel_cipher_exit_inv(tfm);
+		if (ret)
+			dev_warn(priv->dev, "cipher: invalidation error %d\n", ret);
+	} else {
+		dma_pool_free(priv->context_pool, ctx->base.ctxr,
+			      ctx->base.ctxr_dma);
+	}
 }
 struct safexcel_alg_template safexcel_alg_ecb_aes = {
...
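The two cra_exit hunks (cipher above, hash below) follow the same pattern: on an EIP197 the context record must be invalidated by the engine before it can be released, while on an EIP97, which has no invalidation mechanism, the record can simply be returned to the DMA pool. A hypothetical consolidation, not part of the patch, just to show the shared shape:

/* Hypothetical helper: the common teardown logic behind
 * safexcel_skcipher_cra_exit() and safexcel_ahash_cra_exit().
 */
static void safexcel_release_context(struct safexcel_crypto_priv *priv,
				     struct crypto_tfm *tfm,
				     struct safexcel_context *base,
				     int (*exit_inv)(struct crypto_tfm *),
				     const char *what)
{
	int ret;

	if (priv->version == EIP197) {
		/* Queue an invalidation request and wait for its result. */
		ret = exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "%s: invalidation error %d\n",
				 what, ret);
	} else {
		/* EIP97: no invalidation support, just free the record. */
		dma_pool_free(priv->context_pool, base->ctxr, base->ctxr_dma);
	}
}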
@@ -397,6 +397,8 @@ static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 	int err;
+	BUG_ON(priv->version == EIP97 && req->needs_inv);
 	if (req->needs_inv) {
 		req->needs_inv = false;
 		err = safexcel_handle_inv_result(priv, ring, async,
@@ -528,7 +530,8 @@ static int safexcel_ahash_enqueue(struct ahash_request *areq)
 	req->needs_inv = false;
 	if (ctx->base.ctxr) {
-		if (!ctx->base.needs_inv && req->processed &&
+		if (priv->version == EIP197 &&
+		    !ctx->base.needs_inv && req->processed &&
 		    ctx->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
 			/* We're still setting needs_inv here, even though it is
 			 * cleared right away, because the needs_inv flag can be
@@ -721,9 +724,14 @@ static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
 	if (!ctx->base.ctxr)
 		return;
-	ret = safexcel_ahash_exit_inv(tfm);
-	if (ret)
-		dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
+	if (priv->version == EIP197) {
+		ret = safexcel_ahash_exit_inv(tfm);
+		if (ret)
+			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
+	} else {
+		dma_pool_free(priv->context_pool, ctx->base.ctxr,
+			      ctx->base.ctxr_dma);
+	}
 }
 struct safexcel_alg_template safexcel_alg_sha1 = {
@@ -927,6 +935,7 @@ static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
 				     unsigned int keylen)
 {
 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
+	struct safexcel_crypto_priv *priv = ctx->priv;
 	struct safexcel_ahash_export_state istate, ostate;
 	int ret, i;
@@ -934,7 +943,7 @@ static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
 	if (ret)
 		return ret;
-	if (ctx->base.ctxr) {
+	if (priv->version == EIP197 && ctx->base.ctxr) {
 		for (i = 0; i < SHA1_DIGEST_SIZE / sizeof(u32); i++) {
 			if (ctx->ipad[i] != le32_to_cpu(istate.state[i]) ||
 			    ctx->opad[i] != le32_to_cpu(ostate.state[i])) {
...