Commit 78cf1c8b authored by Herbert Xu

crypto: inside-secure - Move ipad/opad into safexcel_context

As both safexcel_ahash_ctx and safexcel_cipher_ctx contain ipad
and opad buffers this patch moves them into the common struct
safexcel_context.  It also adds a union so that they can be accessed
in the appropriate endian without crazy casts.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 18e51895
...@@ -12,7 +12,9 @@ ...@@ -12,7 +12,9 @@
#include <crypto/algapi.h> #include <crypto/algapi.h>
#include <crypto/internal/hash.h> #include <crypto/internal/hash.h>
#include <crypto/sha.h> #include <crypto/sha.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h> #include <crypto/skcipher.h>
#include <linux/types.h>
#define EIP197_HIA_VERSION_BE 0xca35 #define EIP197_HIA_VERSION_BE 0xca35
#define EIP197_HIA_VERSION_LE 0x35ca #define EIP197_HIA_VERSION_LE 0x35ca
...@@ -835,6 +837,13 @@ struct safexcel_context { ...@@ -835,6 +837,13 @@ struct safexcel_context {
struct safexcel_crypto_priv *priv; struct safexcel_crypto_priv *priv;
dma_addr_t ctxr_dma; dma_addr_t ctxr_dma;
union {
__le32 le[SHA3_512_BLOCK_SIZE / 4];
__be32 be[SHA3_512_BLOCK_SIZE / 4];
u32 word[SHA3_512_BLOCK_SIZE / 4];
u8 byte[SHA3_512_BLOCK_SIZE];
} ipad, opad;
int ring; int ring;
bool needs_inv; bool needs_inv;
bool exit_inv; bool exit_inv;
......
...@@ -61,8 +61,6 @@ struct safexcel_cipher_ctx { ...@@ -61,8 +61,6 @@ struct safexcel_cipher_ctx {
/* All the below is AEAD specific */ /* All the below is AEAD specific */
u32 hash_alg; u32 hash_alg;
u32 state_sz; u32 state_sz;
__be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
__be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
struct crypto_cipher *hkaes; struct crypto_cipher *hkaes;
struct crypto_aead *fback; struct crypto_aead *fback;
...@@ -500,8 +498,8 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key, ...@@ -500,8 +498,8 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
} }
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma && if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
(memcmp(ctx->ipad, istate.state, ctx->state_sz) || (memcmp(&ctx->base.ipad, istate.state, ctx->state_sz) ||
memcmp(ctx->opad, ostate.state, ctx->state_sz))) memcmp(&ctx->base.opad, ostate.state, ctx->state_sz)))
ctx->base.needs_inv = true; ctx->base.needs_inv = true;
/* Now copy the keys into the context */ /* Now copy the keys into the context */
...@@ -509,8 +507,8 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key, ...@@ -509,8 +507,8 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]); ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
ctx->key_len = keys.enckeylen; ctx->key_len = keys.enckeylen;
memcpy(ctx->ipad, &istate.state, ctx->state_sz); memcpy(&ctx->base.ipad, &istate.state, ctx->state_sz);
memcpy(ctx->opad, &ostate.state, ctx->state_sz); memcpy(&ctx->base.opad, &ostate.state, ctx->state_sz);
memzero_explicit(&keys, sizeof(keys)); memzero_explicit(&keys, sizeof(keys));
return 0; return 0;
...@@ -718,10 +716,10 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring, ...@@ -718,10 +716,10 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring,
totlen_dst += digestsize; totlen_dst += digestsize;
memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32), memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
ctx->ipad, ctx->state_sz); &ctx->base.ipad, ctx->state_sz);
if (!ctx->xcm) if (!ctx->xcm)
memcpy(ctx->base.ctxr->data + (ctx->key_len + memcpy(ctx->base.ctxr->data + (ctx->key_len +
ctx->state_sz) / sizeof(u32), ctx->opad, ctx->state_sz) / sizeof(u32), &ctx->base.opad,
ctx->state_sz); ctx->state_sz);
} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) && } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
(sreq->direction == SAFEXCEL_DECRYPT)) { (sreq->direction == SAFEXCEL_DECRYPT)) {
...@@ -2618,7 +2616,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key, ...@@ -2618,7 +2616,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) { if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) { for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) { if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
ctx->base.needs_inv = true; ctx->base.needs_inv = true;
break; break;
} }
...@@ -2626,7 +2624,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key, ...@@ -2626,7 +2624,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
} }
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
ctx->ipad[i] = cpu_to_be32(hashkey[i]); ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
memzero_explicit(hashkey, AES_BLOCK_SIZE); memzero_explicit(hashkey, AES_BLOCK_SIZE);
memzero_explicit(&aes, sizeof(aes)); memzero_explicit(&aes, sizeof(aes));
...@@ -2714,7 +2712,7 @@ static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key, ...@@ -2714,7 +2712,7 @@ static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
for (i = 0; i < len / sizeof(u32); i++) { for (i = 0; i < len / sizeof(u32); i++) {
ctx->key[i] = cpu_to_le32(aes.key_enc[i]); ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] = ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
cpu_to_be32(aes.key_enc[i]); cpu_to_be32(aes.key_enc[i]);
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment