Commit e17e209e authored by Jussi Kivilinna, committed by Herbert Xu

crypto: cast5-avx: use ENTRY()/ENDPROC() for assembler functions and localize jump targets

Signed-off-by: Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
Acked-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 59990684
...@@ -23,6 +23,8 @@ ...@@ -23,6 +23,8 @@
* *
*/ */
#include <linux/linkage.h>
.file "cast5-avx-x86_64-asm_64.S" .file "cast5-avx-x86_64-asm_64.S"
.extern cast_s1 .extern cast_s1
...@@ -211,8 +213,6 @@ ...@@ -211,8 +213,6 @@
.text .text
.align 16 .align 16
.type __cast5_enc_blk16,@function;
__cast5_enc_blk16: __cast5_enc_blk16:
/* input: /* input:
* %rdi: ctx, CTX * %rdi: ctx, CTX
...@@ -263,14 +263,14 @@ __cast5_enc_blk16: ...@@ -263,14 +263,14 @@ __cast5_enc_blk16:
movzbl rr(CTX), %eax; movzbl rr(CTX), %eax;
testl %eax, %eax; testl %eax, %eax;
jnz __skip_enc; jnz .L__skip_enc;
round(RL, RR, 12, 1); round(RL, RR, 12, 1);
round(RR, RL, 13, 2); round(RR, RL, 13, 2);
round(RL, RR, 14, 3); round(RL, RR, 14, 3);
round(RR, RL, 15, 1); round(RR, RL, 15, 1);
__skip_enc: .L__skip_enc:
popq %rbx; popq %rbx;
popq %rbp; popq %rbp;
...@@ -282,10 +282,9 @@ __skip_enc: ...@@ -282,10 +282,9 @@ __skip_enc:
outunpack_blocks(RR4, RL4, RTMP, RX, RKM); outunpack_blocks(RR4, RL4, RTMP, RX, RKM);
ret; ret;
ENDPROC(__cast5_enc_blk16)
.align 16 .align 16
.type __cast5_dec_blk16,@function;
__cast5_dec_blk16: __cast5_dec_blk16:
/* input: /* input:
* %rdi: ctx, CTX * %rdi: ctx, CTX
...@@ -323,14 +322,14 @@ __cast5_dec_blk16: ...@@ -323,14 +322,14 @@ __cast5_dec_blk16:
movzbl rr(CTX), %eax; movzbl rr(CTX), %eax;
testl %eax, %eax; testl %eax, %eax;
jnz __skip_dec; jnz .L__skip_dec;
round(RL, RR, 15, 1); round(RL, RR, 15, 1);
round(RR, RL, 14, 3); round(RR, RL, 14, 3);
round(RL, RR, 13, 2); round(RL, RR, 13, 2);
round(RR, RL, 12, 1); round(RR, RL, 12, 1);
__dec_tail: .L__dec_tail:
round(RL, RR, 11, 3); round(RL, RR, 11, 3);
round(RR, RL, 10, 2); round(RR, RL, 10, 2);
round(RL, RR, 9, 1); round(RL, RR, 9, 1);
...@@ -355,15 +354,12 @@ __dec_tail: ...@@ -355,15 +354,12 @@ __dec_tail:
ret; ret;
__skip_dec: .L__skip_dec:
vpsrldq $4, RKR, RKR; vpsrldq $4, RKR, RKR;
jmp __dec_tail; jmp .L__dec_tail;
ENDPROC(__cast5_dec_blk16)
.align 16 ENTRY(cast5_ecb_enc_16way)
.global cast5_ecb_enc_16way
.type cast5_ecb_enc_16way,@function;
cast5_ecb_enc_16way:
/* input: /* input:
* %rdi: ctx, CTX * %rdi: ctx, CTX
* %rsi: dst * %rsi: dst
...@@ -393,12 +389,9 @@ cast5_ecb_enc_16way: ...@@ -393,12 +389,9 @@ cast5_ecb_enc_16way:
vmovdqu RL4, (7*4*4)(%r11); vmovdqu RL4, (7*4*4)(%r11);
ret; ret;
ENDPROC(cast5_ecb_enc_16way)
.align 16 ENTRY(cast5_ecb_dec_16way)
.global cast5_ecb_dec_16way
.type cast5_ecb_dec_16way,@function;
cast5_ecb_dec_16way:
/* input: /* input:
* %rdi: ctx, CTX * %rdi: ctx, CTX
* %rsi: dst * %rsi: dst
...@@ -428,12 +421,9 @@ cast5_ecb_dec_16way: ...@@ -428,12 +421,9 @@ cast5_ecb_dec_16way:
vmovdqu RL4, (7*4*4)(%r11); vmovdqu RL4, (7*4*4)(%r11);
ret; ret;
ENDPROC(cast5_ecb_dec_16way)
.align 16 ENTRY(cast5_cbc_dec_16way)
.global cast5_cbc_dec_16way
.type cast5_cbc_dec_16way,@function;
cast5_cbc_dec_16way:
/* input: /* input:
* %rdi: ctx, CTX * %rdi: ctx, CTX
* %rsi: dst * %rsi: dst
...@@ -480,12 +470,9 @@ cast5_cbc_dec_16way: ...@@ -480,12 +470,9 @@ cast5_cbc_dec_16way:
popq %r12; popq %r12;
ret; ret;
ENDPROC(cast5_cbc_dec_16way)
.align 16 ENTRY(cast5_ctr_16way)
.global cast5_ctr_16way
.type cast5_ctr_16way,@function;
cast5_ctr_16way:
/* input: /* input:
* %rdi: ctx, CTX * %rdi: ctx, CTX
* %rsi: dst * %rsi: dst
...@@ -556,3 +543,4 @@ cast5_ctr_16way: ...@@ -556,3 +543,4 @@ cast5_ctr_16way:
popq %r12; popq %r12;
ret; ret;
ENDPROC(cast5_ctr_16way)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.