Commit 6d5155c2 authored by Markos Chandras, committed by Ralf Baechle

MIPS: lib: memset: Use macro to build the __bzero symbol

Build the __bzero symbol using a macro. In EVA mode we will
need to use similar code to do the userspace load operations, so
it is better if we use a macro to avoid code duplication.
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
parent 8483b14a
...@@ -34,6 +34,9 @@ ...@@ -34,6 +34,9 @@
#define FILLPTRG t0 #define FILLPTRG t0
#endif #endif
#define LEGACY_MODE 1
#define EVA_MODE 2
#define EX(insn,reg,addr,handler) \ #define EX(insn,reg,addr,handler) \
9: insn reg, addr; \ 9: insn reg, addr; \
.section __ex_table,"a"; \ .section __ex_table,"a"; \
...@@ -63,33 +66,23 @@ ...@@ -63,33 +66,23 @@
#endif #endif
.endm .endm
/*
* memset(void *s, int c, size_t n)
*
* a0: start of area to clear
* a1: char to fill with
* a2: size of area to clear
*/
.set noreorder .set noreorder
.align 5 .align 5
LEAF(memset)
beqz a1, 1f
move v0, a0 /* result */
andi a1, 0xff /* spread fillword */ /*
LONG_SLL t1, a1, 8 * Macro to generate the __bzero{,_user} symbol
or a1, t1 * Arguments:
LONG_SLL t1, a1, 16 * mode: LEGACY_MODE or EVA_MODE
#if LONGSIZE == 8 */
or a1, t1 .macro __BUILD_BZERO mode
LONG_SLL t1, a1, 32 /* Initialize __memset if this is the first time we call this macro */
#endif .ifnotdef __memset
or a1, t1 .set __memset, 1
1: .hidden __memset /* Make sure it does not leak */
.endif
FEXPORT(__bzero)
sltiu t0, a2, STORSIZE /* very small region? */ sltiu t0, a2, STORSIZE /* very small region? */
bnez t0, .Lsmall_memset bnez t0, .Lsmall_memset\@
andi t0, a0, STORMASK /* aligned? */ andi t0, a0, STORMASK /* aligned? */
#ifdef CONFIG_CPU_MICROMIPS #ifdef CONFIG_CPU_MICROMIPS
...@@ -109,28 +102,28 @@ FEXPORT(__bzero) ...@@ -109,28 +102,28 @@ FEXPORT(__bzero)
R10KCBARRIER(0(ra)) R10KCBARRIER(0(ra))
#ifdef __MIPSEB__ #ifdef __MIPSEB__
EX(LONG_S_L, a1, (a0), .Lfirst_fixup) /* make word/dword aligned */ EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
#endif #endif
#ifdef __MIPSEL__ #ifdef __MIPSEL__
EX(LONG_S_R, a1, (a0), .Lfirst_fixup) /* make word/dword aligned */ EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
#endif #endif
PTR_SUBU a0, t0 /* long align ptr */ PTR_SUBU a0, t0 /* long align ptr */
PTR_ADDU a2, t0 /* correct size */ PTR_ADDU a2, t0 /* correct size */
1: ori t1, a2, 0x3f /* # of full blocks */ 1: ori t1, a2, 0x3f /* # of full blocks */
xori t1, 0x3f xori t1, 0x3f
beqz t1, .Lmemset_partial /* no block to fill */ beqz t1, .Lmemset_partial\@ /* no block to fill */
andi t0, a2, 0x40-STORSIZE andi t0, a2, 0x40-STORSIZE
PTR_ADDU t1, a0 /* end address */ PTR_ADDU t1, a0 /* end address */
.set reorder .set reorder
1: PTR_ADDIU a0, 64 1: PTR_ADDIU a0, 64
R10KCBARRIER(0(ra)) R10KCBARRIER(0(ra))
f_fill64 a0, -64, FILL64RG, .Lfwd_fixup f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@
bne t1, a0, 1b bne t1, a0, 1b
.set noreorder .set noreorder
.Lmemset_partial: .Lmemset_partial\@:
R10KCBARRIER(0(ra)) R10KCBARRIER(0(ra))
PTR_LA t1, 2f /* where to start */ PTR_LA t1, 2f /* where to start */
#ifdef CONFIG_CPU_MICROMIPS #ifdef CONFIG_CPU_MICROMIPS
...@@ -150,7 +143,8 @@ FEXPORT(__bzero) ...@@ -150,7 +143,8 @@ FEXPORT(__bzero)
.set push .set push
.set noreorder .set noreorder
.set nomacro .set nomacro
f_fill64 a0, -64, FILL64RG, .Lpartial_fixup /* ... but first do longs ... */ /* ... but first do longs ... */
f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@
2: .set pop 2: .set pop
andi a2, STORMASK /* At most one long to go */ andi a2, STORMASK /* At most one long to go */
...@@ -158,15 +152,15 @@ FEXPORT(__bzero) ...@@ -158,15 +152,15 @@ FEXPORT(__bzero)
PTR_ADDU a0, a2 /* What's left */ PTR_ADDU a0, a2 /* What's left */
R10KCBARRIER(0(ra)) R10KCBARRIER(0(ra))
#ifdef __MIPSEB__ #ifdef __MIPSEB__
EX(LONG_S_R, a1, -1(a0), .Llast_fixup) EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#endif #endif
#ifdef __MIPSEL__ #ifdef __MIPSEL__
EX(LONG_S_L, a1, -1(a0), .Llast_fixup) EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif #endif
1: jr ra 1: jr ra
move a2, zero move a2, zero
.Lsmall_memset: .Lsmall_memset\@:
beqz a2, 2f beqz a2, 2f
PTR_ADDU t1, a0, a2 PTR_ADDU t1, a0, a2
...@@ -177,13 +171,17 @@ FEXPORT(__bzero) ...@@ -177,13 +171,17 @@ FEXPORT(__bzero)
2: jr ra /* done */ 2: jr ra /* done */
move a2, zero move a2, zero
.if __memset == 1
END(memset) END(memset)
.set __memset, 0
.hidden __memset
.endif
.Lfirst_fixup: .Lfirst_fixup\@:
jr ra jr ra
nop nop
.Lfwd_fixup: .Lfwd_fixup\@:
PTR_L t0, TI_TASK($28) PTR_L t0, TI_TASK($28)
andi a2, 0x3f andi a2, 0x3f
LONG_L t0, THREAD_BUADDR(t0) LONG_L t0, THREAD_BUADDR(t0)
...@@ -191,7 +189,7 @@ FEXPORT(__bzero) ...@@ -191,7 +189,7 @@ FEXPORT(__bzero)
jr ra jr ra
LONG_SUBU a2, t0 LONG_SUBU a2, t0
.Lpartial_fixup: .Lpartial_fixup\@:
PTR_L t0, TI_TASK($28) PTR_L t0, TI_TASK($28)
andi a2, STORMASK andi a2, STORMASK
LONG_L t0, THREAD_BUADDR(t0) LONG_L t0, THREAD_BUADDR(t0)
...@@ -199,6 +197,33 @@ FEXPORT(__bzero) ...@@ -199,6 +197,33 @@ FEXPORT(__bzero)
jr ra jr ra
LONG_SUBU a2, t0 LONG_SUBU a2, t0
.Llast_fixup: .Llast_fixup\@:
jr ra jr ra
andi v1, a2, STORMASK andi v1, a2, STORMASK
.endm
/*
* memset(void *s, int c, size_t n)
*
* a0: start of area to clear
* a1: char to fill with
* a2: size of area to clear
*/
LEAF(memset)
beqz a1, 1f
move v0, a0 /* result */
andi a1, 0xff /* spread fillword */
LONG_SLL t1, a1, 8
or a1, t1
LONG_SLL t1, a1, 16
#if LONGSIZE == 8
or a1, t1
LONG_SLL t1, a1, 32
#endif
or a1, t1
1:
FEXPORT(__bzero)
__BUILD_BZERO LEGACY_MODE
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment