Commit d47b2dc8 authored by WANG Xuerui, committed by Huacai Chen

LoongArch: Simplify "BEQ/BNE foo, zero" with BEQZ/BNEZ

While B{EQ,NE}Z and B{EQ,NE} are different instructions, and the vastly
expanded branch range does not really matter in the few cases touched,
use B{EQ,NE}Z where possible, for shorter lines and better consistency
(e.g. some places used "BEQ foo, zero", while others used "BEQ zero,
foo").
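
For reference, both forms behave identically when one operand is the zero
register, but they are distinct encodings: BEQ/BNE compare two registers and
carry a 16-bit (word-shifted) branch offset, while BEQZ/BNEZ test a single
register and carry a 21-bit offset, hence the expanded range. A minimal
before/after sketch (labels hypothetical, not from the patched code):

    # before: explicit compare against the zero register
    beq   $a0, $zero, 1f        # taken if $a0 == 0
    bne   $zero, $a1, 2f        # operand order varied across the tree

    # after: dedicated zero-test forms, with a wider branch range
    beqz  $a0, 1f               # taken if $a0 == 0
    bnez  $a1, 2f               # taken if $a1 != 0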
Signed-off-by: WANG Xuerui <git@xen0n.name>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
parent 57ce5d3e
@@ -160,7 +160,7 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
 " move %1, %0 \n"
 " blt %0, $zero, 2f \n"
 " sc.w %1, %2 \n"
-" beq $zero, %1, 1b \n"
+" beqz %1, 1b \n"
 "2: \n"
 __WEAK_LLSC_MB
 : "=&r" (result), "=&r" (temp),
@@ -173,7 +173,7 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
 " move %1, %0 \n"
 " blt %0, $zero, 2f \n"
 " sc.w %1, %2 \n"
-" beq $zero, %1, 1b \n"
+" beqz %1, 1b \n"
 "2: \n"
 __WEAK_LLSC_MB
 : "=&r" (result), "=&r" (temp),
@@ -323,7 +323,7 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
 " move %1, %0 \n"
 " blt %0, $zero, 2f \n"
 " sc.d %1, %2 \n"
-" beq %1, $zero, 1b \n"
+" beqz %1, 1b \n"
 "2: \n"
 __WEAK_LLSC_MB
 : "=&r" (result), "=&r" (temp),
@@ -336,7 +336,7 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
 " move %1, %0 \n"
 " blt %0, $zero, 2f \n"
 " sc.d %1, %2 \n"
-" beq %1, $zero, 1b \n"
+" beqz %1, 1b \n"
 "2: \n"
 __WEAK_LLSC_MB
 : "=&r" (result), "=&r" (temp),
...
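All four atomic.h hunks above touch the same LL/SC retry loop: sc.w/sc.d
writes 1 back into its data register on success and 0 on failure, so the
branch back to 1b simply retries until the store-conditional succeeds. A
standalone sketch of the pattern, simplified from the hunks above (function
name hypothetical, not the kernel's exact code):

    /* Subtract i from *v only if the result stays non-negative;
     * returns the computed value either way. */
    static inline int sub_if_positive_sketch(int i, int *v)
    {
            int result, temp;

            __asm__ __volatile__(
            "1:     ll.w    %1, %2          \n"     /* load-linked old value */
            "       sub.w   %0, %1, %3      \n"     /* result = old - i */
            "       move    %1, %0          \n"
            "       blt     %0, $zero, 2f   \n"     /* negative: skip the store */
            "       sc.w    %1, %2          \n"     /* store-conditional, %1 = success */
            "       beqz    %1, 1b          \n"     /* 0 = lost reservation, retry */
            "2:                             \n"
            : "=&r" (result), "=&r" (temp), "+ZB" (*v)
            : "r" (i));

            return result;
    }

The cmpxchg, futex and TLB-refill hunks below follow the same retry shape,
differing only in what is computed between the ll and the sc.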
@@ -57,7 +57,7 @@ static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
 " bne %0, %z3, 2f \n" \
 " move $t0, %z4 \n" \
 " " st " $t0, %1 \n" \
-" beq $zero, $t0, 1b \n" \
+" beqz $t0, 1b \n" \
 "2: \n" \
 __WEAK_LLSC_MB \
 : "=&r" (__ret), "=ZB"(*m) \
...
@@ -17,7 +17,7 @@
 "1: ll.w %1, %4 # __futex_atomic_op\n" \
 " " insn " \n" \
 "2: sc.w $t0, %2 \n" \
-" beq $t0, $zero, 1b \n" \
+" beqz $t0, 1b \n" \
 "3: \n" \
 " .section .fixup,\"ax\" \n" \
 "4: li.w %0, %6 \n" \
@@ -84,7 +84,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newval)
 " bne %1, %z4, 3f \n"
 " move $t0, %z5 \n"
 "2: sc.w $t0, %2 \n"
-" beq $zero, $t0, 1b \n"
+" beqz $t0, 1b \n"
 "3: \n"
 __WEAK_LLSC_MB
 " .section .fixup,\"ax\" \n"
...
@@ -80,7 +80,7 @@ vmalloc_done_load:
 * see if we need to jump to huge tlb processing.
 */
 andi t0, ra, _PAGE_HUGE
-bne t0, zero, tlb_huge_update_load
+bnez t0, tlb_huge_update_load
 csrrd t0, LOONGARCH_CSR_BADV
 srli.d t0, t0, (PAGE_SHIFT + PTE_ORDER)
@@ -100,12 +100,12 @@ smp_pgtable_change_load:
 srli.d ra, t0, _PAGE_PRESENT_SHIFT
 andi ra, ra, 1
-beq ra, zero, nopage_tlb_load
+beqz ra, nopage_tlb_load
 ori t0, t0, _PAGE_VALID
 #ifdef CONFIG_SMP
 sc.d t0, t1, 0
-beq t0, zero, smp_pgtable_change_load
+beqz t0, smp_pgtable_change_load
 #else
 st.d t0, t1, 0
 #endif
@@ -139,13 +139,13 @@ tlb_huge_update_load:
 #endif
 srli.d ra, t0, _PAGE_PRESENT_SHIFT
 andi ra, ra, 1
-beq ra, zero, nopage_tlb_load
+beqz ra, nopage_tlb_load
 tlbsrch
 ori t0, t0, _PAGE_VALID
 #ifdef CONFIG_SMP
 sc.d t0, t1, 0
-beq t0, zero, tlb_huge_update_load
+beqz t0, tlb_huge_update_load
 ld.d t0, t1, 0
 #else
 st.d t0, t1, 0
@@ -244,7 +244,7 @@ vmalloc_done_store:
 * see if we need to jump to huge tlb processing.
 */
 andi t0, ra, _PAGE_HUGE
-bne t0, zero, tlb_huge_update_store
+bnez t0, tlb_huge_update_store
 csrrd t0, LOONGARCH_CSR_BADV
 srli.d t0, t0, (PAGE_SHIFT + PTE_ORDER)
@@ -265,12 +265,12 @@ smp_pgtable_change_store:
 srli.d ra, t0, _PAGE_PRESENT_SHIFT
 andi ra, ra, ((_PAGE_PRESENT | _PAGE_WRITE) >> _PAGE_PRESENT_SHIFT)
 xori ra, ra, ((_PAGE_PRESENT | _PAGE_WRITE) >> _PAGE_PRESENT_SHIFT)
-bne ra, zero, nopage_tlb_store
+bnez ra, nopage_tlb_store
 ori t0, t0, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
 #ifdef CONFIG_SMP
 sc.d t0, t1, 0
-beq t0, zero, smp_pgtable_change_store
+beqz t0, smp_pgtable_change_store
 #else
 st.d t0, t1, 0
 #endif
@@ -306,14 +306,14 @@ tlb_huge_update_store:
 srli.d ra, t0, _PAGE_PRESENT_SHIFT
 andi ra, ra, ((_PAGE_PRESENT | _PAGE_WRITE) >> _PAGE_PRESENT_SHIFT)
 xori ra, ra, ((_PAGE_PRESENT | _PAGE_WRITE) >> _PAGE_PRESENT_SHIFT)
-bne ra, zero, nopage_tlb_store
+bnez ra, nopage_tlb_store
 tlbsrch
 ori t0, t0, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
 #ifdef CONFIG_SMP
 sc.d t0, t1, 0
-beq t0, zero, tlb_huge_update_store
+beqz t0, tlb_huge_update_store
 ld.d t0, t1, 0
 #else
 st.d t0, t1, 0
@@ -411,7 +411,7 @@ vmalloc_done_modify:
 * see if we need to jump to huge tlb processing.
 */
 andi t0, ra, _PAGE_HUGE
-bne t0, zero, tlb_huge_update_modify
+bnez t0, tlb_huge_update_modify
 csrrd t0, LOONGARCH_CSR_BADV
 srli.d t0, t0, (PAGE_SHIFT + PTE_ORDER)
@@ -431,12 +431,12 @@ smp_pgtable_change_modify:
 srli.d ra, t0, _PAGE_WRITE_SHIFT
 andi ra, ra, 1
-beq ra, zero, nopage_tlb_modify
+beqz ra, nopage_tlb_modify
 ori t0, t0, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
 #ifdef CONFIG_SMP
 sc.d t0, t1, 0
-beq t0, zero, smp_pgtable_change_modify
+beqz t0, smp_pgtable_change_modify
 #else
 st.d t0, t1, 0
 #endif
@@ -471,14 +471,14 @@ tlb_huge_update_modify:
 srli.d ra, t0, _PAGE_WRITE_SHIFT
 andi ra, ra, 1
-beq ra, zero, nopage_tlb_modify
+beqz ra, nopage_tlb_modify
 tlbsrch
 ori t0, t0, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
 #ifdef CONFIG_SMP
 sc.d t0, t1, 0
-beq t0, zero, tlb_huge_update_modify
+beqz t0, tlb_huge_update_modify
 ld.d t0, t1, 0
 #else
 st.d t0, t1, 0
...