Commit 31dab719 authored by Linus Torvalds

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Pull ARM AES crypto fixes from Herbert Xu:
 "This push fixes a regression on ARM where odd-sized blocks supplied to
  AES may cause crashes"

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  crypto: arm-aes - fix encryption of unaligned data
  crypto: arm64-aes - fix encryption of unaligned data
parents e8a91e0e f3c400ef
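
Both patches make the same one-line change in every blkcipher walk loop: instead of telling blkcipher_walk_done() that zero bytes remain, they pass walk.nbytes % AES_BLOCK_SIZE, the tail that the block-oriented inner loop did not consume. With unaligned input the walk can hand the cipher a chunk whose length is not a multiple of AES_BLOCK_SIZE, and reporting the true leftover lets the walk machinery carry those bytes into the next step instead of losing track of them. The first group of hunks below is the 32-bit ARM bit-sliced AES glue (the aesbs_* routines); the second group is the arm64 glue (ecb_*/cbc_*/xts_*). The stand-alone user-space sketch that follows only simulates that bookkeeping (the chunk sizes and variable names are illustrative, not taken from the patches), to show why the remainder must be reported:

#include <stdio.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
	/* Hypothetical per-step chunk sizes, e.g. from page-boundary splits
	 * of an unaligned scatterlist; 128 bytes total = 8 AES blocks. */
	unsigned int chunks[] = { 40, 24, 64 };
	unsigned int carried = 0, blocks_done = 0;

	for (unsigned int i = 0; i < sizeof(chunks) / sizeof(chunks[0]); i++) {
		/* The walk re-presents any bytes reported as unprocessed,
		 * so they show up at the front of the next chunk. */
		unsigned int nbytes = carried + chunks[i];
		unsigned int blocks = nbytes / AES_BLOCK_SIZE;
		unsigned int tail   = nbytes % AES_BLOCK_SIZE;

		blocks_done += blocks;	/* the cipher handles whole blocks only */
		carried = tail;		/* models blkcipher_walk_done(desc, &walk, tail) */

		printf("step %u: %3u bytes -> %u block(s), %2u byte(s) carried over\n",
		       i, nbytes, blocks, tail);
	}
	printf("blocks processed: %u, leftover: %u\n", blocks_done, carried);
	return 0;
}

Reporting 0 instead, as the old code did, would have treated the 8-byte tail of the first step as already handled, which is the kind of mis-accounting behind the crashes mentioned in the pull request.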
@@ -137,7 +137,7 @@ static int aesbs_cbc_encrypt(struct blkcipher_desc *desc,
 				dst += AES_BLOCK_SIZE;
 			} while (--blocks);
 		}
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }
@@ -158,7 +158,7 @@ static int aesbs_cbc_decrypt(struct blkcipher_desc *desc,
 		bsaes_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
 				  walk.nbytes, &ctx->dec, walk.iv);
 		kernel_neon_end();
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	while (walk.nbytes) {
 		u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
@@ -182,7 +182,7 @@ static int aesbs_cbc_decrypt(struct blkcipher_desc *desc,
 			dst += AES_BLOCK_SIZE;
 			src += AES_BLOCK_SIZE;
 		} while (--blocks);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }
@@ -268,7 +268,7 @@ static int aesbs_xts_encrypt(struct blkcipher_desc *desc,
 		bsaes_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
 				  walk.nbytes, &ctx->enc, walk.iv);
 		kernel_neon_end();
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }
@@ -292,7 +292,7 @@ static int aesbs_xts_decrypt(struct blkcipher_desc *desc,
 		bsaes_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
 				  walk.nbytes, &ctx->dec, walk.iv);
 		kernel_neon_end();
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }
@@ -106,7 +106,7 @@ static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
 		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key_enc, rounds, blocks, first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
@@ -128,7 +128,7 @@ static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
 		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key_dec, rounds, blocks, first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
@@ -151,7 +151,7 @@ static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
 				first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
@@ -174,7 +174,7 @@ static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
 				first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
@@ -243,7 +243,7 @@ static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key1.key_enc, rounds, blocks,
 				(u8 *)ctx->key2.key_enc, walk.iv, first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
@@ -267,7 +267,7 @@ static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key1.key_dec, rounds, blocks,
 				(u8 *)ctx->key2.key_enc, walk.iv, first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();