Commit 4291d085 authored by Christophe Leroy, committed by Michael Ellerman

powerpc/32s: Make pte_update() non atomic on 603 core

On the 603 core, the TLB miss handler doesn't make any changes to the
page tables, so pte_update() doesn't need to be atomic.
Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/cc89d3c11fc9c742d0df3454a657a3a00be24046.1643538554.git.christophe.leroy@csgroup.eu
parent 535bda36
@@ -298,28 +298,35 @@ static inline pte_basic_t pte_update(struct mm_struct *mm, unsigned long addr, p
 				     unsigned long clr, unsigned long set, int huge)
 {
 	pte_basic_t old;
-	unsigned long tmp;
 
-	__asm__ __volatile__(
+	if (mmu_has_feature(MMU_FTR_HPTE_TABLE)) {
+		unsigned long tmp;
+
+		asm volatile(
 #ifndef CONFIG_PTE_64BIT
 	"1:	lwarx	%0, 0, %3\n"
 	"	andc	%1, %0, %4\n"
 #else
 	"1:	lwarx	%L0, 0, %3\n"
 	"	lwz	%0, -4(%3)\n"
 	"	andc	%1, %L0, %4\n"
 #endif
 	"	or	%1, %1, %5\n"
 	"	stwcx.	%1, 0, %3\n"
 	"	bne-	1b"
 	: "=&r" (old), "=&r" (tmp), "=m" (*p)
 #ifndef CONFIG_PTE_64BIT
 	: "r" (p),
 #else
 	: "b" ((unsigned long)(p) + 4),
 #endif
 	  "r" (clr), "r" (set), "m" (*p)
 	: "cc" );
+	} else {
+		old = pte_val(*p);
+
+		*p = __pte((old & ~(pte_basic_t)clr) | set);
+	}
 
 	return old;
 }
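
For readers unfamiliar with the two MMU families, the change can be illustrated with a small standalone C sketch. It is not kernel code: the names pte_word, update_pte_atomic() and update_pte_plain() are invented for illustration, and a GCC compare-exchange loop stands in for the lwarx/stwcx. sequence used in the patch. The point is the same as in the commit message: when something else (here, another thread; in the kernel, the hash-MMU management code) may rewrite the PTE concurrently, the clear-and-set must be one atomic read-modify-write, whereas on the 603, where the TLB miss handler only reads the page tables, a plain read-modify-write that returns the old value is enough.

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

typedef uint32_t pte_word;	/* stand-in for a 32-bit pte_basic_t (no CONFIG_PTE_64BIT) */

/*
 * Hash-MMU case: the PTE may be rewritten concurrently (for example by
 * hash-table maintenance), so clear+set must be a single atomic
 * read-modify-write.  The patch keeps a lwarx/stwcx. loop for this; a
 * compare-exchange loop is the portable stand-in used here.
 */
static pte_word update_pte_atomic(pte_word *p, pte_word clr, pte_word set)
{
	pte_word old = __atomic_load_n(p, __ATOMIC_RELAXED);

	/* on failure, 'old' is refreshed with the current value and we retry */
	while (!__atomic_compare_exchange_n(p, &old, (old & ~clr) | set,
					    0, __ATOMIC_RELAXED, __ATOMIC_RELAXED))
		;
	return old;
}

/*
 * 603 case: the software TLB miss handler only reads the page tables, so
 * nothing rewrites the PTE behind our back and a plain read-modify-write
 * is sufficient.
 */
static pte_word update_pte_plain(pte_word *p, pte_word clr, pte_word set)
{
	pte_word old = *p;

	*p = (old & ~clr) | set;
	return old;
}

int main(void)
{
	pte_word pte = 0x00000107;	/* arbitrary example PTE bits */
	pte_word old;

	old = update_pte_atomic(&pte, 0x100, 0x000);	/* clear 0x100 */
	printf("atomic: old=0x%" PRIx32 " new=0x%" PRIx32 "\n", old, pte);

	old = update_pte_plain(&pte, 0x004, 0x002);	/* clear 0x004, set 0x002 */
	printf("plain:  old=0x%" PRIx32 " new=0x%" PRIx32 "\n", old, pte);
	return 0;
}

Both helpers return the previous PTE value, mirroring pte_update()'s contract so that callers can inspect bits that have just been cleared.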