Commit a13ed1d1 authored by Ard Biesheuvel, committed by Herbert Xu

crypto: aesni - prevent misaligned buffers on the stack

The GCM mode driver uses 16 byte aligned buffers on the stack to pass
the IV to the asm helpers, but unfortunately, the x86 port does not
guarantee that the stack pointer is 16 byte aligned upon entry in the
first place. Since the compiler is not aware of this, it will not emit
the additional stack realignment sequence that is needed, and so the
alignment is not guaranteed to be more than 8 bytes.

So instead, allocate some padding on the stack, and realign the IV
pointer by hand.

Cc: <stable@vger.kernel.org>
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 4f1a02e7
@@ -710,7 +710,8 @@ static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
 	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
-	struct gcm_context_data data AESNI_ALIGN_ATTR;
+	u8 databuf[sizeof(struct gcm_context_data) + (AESNI_ALIGN - 8)] __aligned(8);
+	struct gcm_context_data *data = PTR_ALIGN((void *)databuf, AESNI_ALIGN);
 	struct scatter_walk dst_sg_walk = {};
 	unsigned long left = req->cryptlen;
 	unsigned long len, srclen, dstlen;
@@ -759,8 +760,7 @@ static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
 	}
 
 	kernel_fpu_begin();
-	gcm_tfm->init(aes_ctx, &data, iv,
-		      hash_subkey, assoc, assoclen);
+	gcm_tfm->init(aes_ctx, data, iv, hash_subkey, assoc, assoclen);
 	if (req->src != req->dst) {
 		while (left) {
 			src = scatterwalk_map(&src_sg_walk);
@@ -770,10 +770,10 @@ static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
 			len = min(srclen, dstlen);
 			if (len) {
 				if (enc)
-					gcm_tfm->enc_update(aes_ctx, &data,
+					gcm_tfm->enc_update(aes_ctx, data,
 							    dst, src, len);
 				else
-					gcm_tfm->dec_update(aes_ctx, &data,
+					gcm_tfm->dec_update(aes_ctx, data,
 							    dst, src, len);
 			}
 			left -= len;
@@ -791,10 +791,10 @@ static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
 			len = scatterwalk_clamp(&src_sg_walk, left);
 			if (len) {
 				if (enc)
-					gcm_tfm->enc_update(aes_ctx, &data,
+					gcm_tfm->enc_update(aes_ctx, data,
 							    src, src, len);
 				else
-					gcm_tfm->dec_update(aes_ctx, &data,
+					gcm_tfm->dec_update(aes_ctx, data,
 							    src, src, len);
 			}
 			left -= len;
@@ -803,7 +803,7 @@ static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
 			scatterwalk_done(&src_sg_walk, 1, left);
 		}
 	}
-	gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
+	gcm_tfm->finalize(aes_ctx, data, authTag, auth_tag_len);
 	kernel_fpu_end();
 
 	if (!assocmem)
@@ -852,7 +852,8 @@ static int helper_rfc4106_encrypt(struct aead_request *req)
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
 	void *aes_ctx = &(ctx->aes_key_expanded);
-	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
+	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
+	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
 	unsigned int i;
 	__be32 counter = cpu_to_be32(1);
@@ -879,7 +880,8 @@ static int helper_rfc4106_decrypt(struct aead_request *req)
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
 	void *aes_ctx = &(ctx->aes_key_expanded);
-	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
+	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
+	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
 	unsigned int i;
 
 	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
@@ -1149,7 +1151,8 @@ static int generic_gcmaes_encrypt(struct aead_request *req)
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
 	void *aes_ctx = &(ctx->aes_key_expanded);
-	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
+	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
+	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
 	__be32 counter = cpu_to_be32(1);
 
 	memcpy(iv, req->iv, 12);
@@ -1165,7 +1168,8 @@ static int generic_gcmaes_decrypt(struct aead_request *req)
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
 	void *aes_ctx = &(ctx->aes_key_expanded);
-	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
+	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
+	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
 
 	memcpy(iv, req->iv, 12);
 	*((__be32 *)(iv+12)) = counter;
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment