Commit 15d914d7 authored by Joakim Tjernlund's avatar Joakim Tjernlund Committed by Benjamin Herrenschmidt

powerpc/8xx: Start using dcbX instructions in various copy routines

Now that 8xx can fix up dcbX instructions, start using them
where possible, as every other PowerPC arch does.
Signed-off-by: Joakim Tjernlund <Joakim.Tjernlund@transmode.se>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
parent 0c466169
@@ -502,15 +502,7 @@ _GLOBAL(clear_pages)
li r0,PAGE_SIZE/L1_CACHE_BYTES li r0,PAGE_SIZE/L1_CACHE_BYTES
slw r0,r0,r4 slw r0,r0,r4
mtctr r0 mtctr r0
#ifdef CONFIG_8xx
li r4, 0
1: stw r4, 0(r3)
stw r4, 4(r3)
stw r4, 8(r3)
stw r4, 12(r3)
#else
1: dcbz 0,r3 1: dcbz 0,r3
#endif
addi r3,r3,L1_CACHE_BYTES addi r3,r3,L1_CACHE_BYTES
bdnz 1b bdnz 1b
blr blr
@@ -535,15 +527,6 @@ _GLOBAL(copy_page)
addi r3,r3,-4 addi r3,r3,-4
addi r4,r4,-4 addi r4,r4,-4
#ifdef CONFIG_8xx
/* don't use prefetch on 8xx */
li r0,4096/L1_CACHE_BYTES
mtctr r0
1: COPY_16_BYTES
bdnz 1b
blr
#else /* not 8xx, we can prefetch */
li r5,4 li r5,4
#if MAX_COPY_PREFETCH > 1 #if MAX_COPY_PREFETCH > 1
@@ -584,7 +567,6 @@ _GLOBAL(copy_page)
li r0,MAX_COPY_PREFETCH li r0,MAX_COPY_PREFETCH
li r11,4 li r11,4
b 2b b 2b
#endif /* CONFIG_8xx */
/* /*
* void atomic_clear_mask(atomic_t mask, atomic_t *addr) * void atomic_clear_mask(atomic_t mask, atomic_t *addr)
......
@@ -98,20 +98,7 @@ _GLOBAL(cacheable_memzero)
bdnz 4b bdnz 4b
3: mtctr r9 3: mtctr r9
li r7,4 li r7,4
#if !defined(CONFIG_8xx)
10: dcbz r7,r6 10: dcbz r7,r6
#else
10: stw r4, 4(r6)
stw r4, 8(r6)
stw r4, 12(r6)
stw r4, 16(r6)
#if CACHE_LINE_SIZE >= 32
stw r4, 20(r6)
stw r4, 24(r6)
stw r4, 28(r6)
stw r4, 32(r6)
#endif /* CACHE_LINE_SIZE */
#endif
addi r6,r6,CACHELINE_BYTES addi r6,r6,CACHELINE_BYTES
bdnz 10b bdnz 10b
clrlwi r5,r8,32-LG_CACHELINE_BYTES clrlwi r5,r8,32-LG_CACHELINE_BYTES
@@ -200,9 +187,7 @@ _GLOBAL(cacheable_memcpy)
mtctr r0 mtctr r0
beq 63f beq 63f
53: 53:
#if !defined(CONFIG_8xx)
dcbz r11,r6 dcbz r11,r6
#endif
COPY_16_BYTES COPY_16_BYTES
#if L1_CACHE_BYTES >= 32 #if L1_CACHE_BYTES >= 32
COPY_16_BYTES COPY_16_BYTES
@@ -356,14 +341,6 @@ _GLOBAL(__copy_tofrom_user)
li r11,4 li r11,4
beq 63f beq 63f
#ifdef CONFIG_8xx
/* Don't use prefetch on 8xx */
mtctr r0
li r0,0
53: COPY_16_BYTES_WITHEX(0)
bdnz 53b
#else /* not CONFIG_8xx */
/* Here we decide how far ahead to prefetch the source */ /* Here we decide how far ahead to prefetch the source */
li r3,4 li r3,4
cmpwi r0,1 cmpwi r0,1
@@ -416,7 +393,6 @@ _GLOBAL(__copy_tofrom_user)
li r3,4 li r3,4
li r7,0 li r7,0
bne 114b bne 114b
#endif /* CONFIG_8xx */
63: srwi. r0,r5,2 63: srwi. r0,r5,2
mtctr r0 mtctr r0
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment