Commit 9537db24 authored by Paul Burton

MIPS: atomic: Handle !kernel_uses_llsc first

Handle the !kernel_uses_llsc path first in our ATOMIC_OP(),
ATOMIC_OP_RETURN() & ATOMIC_FETCH_OP() macros & return from within the
block. This allows us to de-indent the kernel_uses_llsc path by one
level which will be useful when making further changes.
Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
parent 36d3295c
@@ -45,13 +45,21 @@
 #define ATOMIC_OP(op, c_op, asm_op) \
 static __inline__ void atomic_##op(int i, atomic_t * v) \
 { \
-	if (kernel_uses_llsc) { \
-		int temp; \
+	int temp; \
 	\
+	if (!kernel_uses_llsc) { \
+		unsigned long flags; \
+		\
+		raw_local_irq_save(flags); \
+		v->counter c_op i; \
+		raw_local_irq_restore(flags); \
+		return; \
+	} \
+	\
 	loongson_llsc_mb(); \
 	__asm__ __volatile__( \
 	"	.set	push \n" \
-	"	.set	"MIPS_ISA_LEVEL" \n" \
+	"	.set	" MIPS_ISA_LEVEL " \n" \
 	"1:	ll	%0, %1	# atomic_" #op " \n" \
 	"	" #asm_op "	%0, %2 \n" \
 	"	sc	%0, %1 \n" \
@@ -59,27 +67,28 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \
 	"	.set	pop \n" \
 	: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
 	: "Ir" (i) : __LLSC_CLOBBER); \
-	} else { \
-		unsigned long flags; \
-		\
-		raw_local_irq_save(flags); \
-		v->counter c_op i; \
-		raw_local_irq_restore(flags); \
-	} \
 }
 
 #define ATOMIC_OP_RETURN(op, c_op, asm_op) \
 static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
 { \
-	int result; \
+	int temp, result; \
 	\
-	if (kernel_uses_llsc) { \
-		int temp; \
+	if (!kernel_uses_llsc) { \
+		unsigned long flags; \
+		\
+		raw_local_irq_save(flags); \
+		result = v->counter; \
+		result c_op i; \
+		v->counter = result; \
+		raw_local_irq_restore(flags); \
+		return result; \
+	} \
 	\
 	loongson_llsc_mb(); \
 	__asm__ __volatile__( \
 	"	.set	push \n" \
-	"	.set	"MIPS_ISA_LEVEL" \n" \
+	"	.set	" MIPS_ISA_LEVEL " \n" \
 	"1:	ll	%1, %2	# atomic_" #op "_return \n" \
 	"	" #asm_op "	%0, %1, %3 \n" \
 	"	sc	%0, %2 \n" \
@@ -89,15 +98,6 @@ static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
 	: "=&r" (result), "=&r" (temp), \
 	  "+" GCC_OFF_SMALL_ASM() (v->counter) \
 	: "Ir" (i) : __LLSC_CLOBBER); \
-	} else { \
-		unsigned long flags; \
-		\
-		raw_local_irq_save(flags); \
-		result = v->counter; \
-		result c_op i; \
-		v->counter = result; \
-		raw_local_irq_restore(flags); \
-	} \
 	\
 	return result; \
 }
@@ -105,10 +105,17 @@ static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
 #define ATOMIC_FETCH_OP(op, c_op, asm_op) \
 static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
 { \
-	int result; \
+	int temp, result; \
 	\
-	if (kernel_uses_llsc) { \
-		int temp; \
+	if (!kernel_uses_llsc) { \
+		unsigned long flags; \
+		\
+		raw_local_irq_save(flags); \
+		result = v->counter; \
+		v->counter c_op i; \
+		raw_local_irq_restore(flags); \
+		return result; \
+	} \
 	\
 	loongson_llsc_mb(); \
 	__asm__ __volatile__( \
@@ -123,14 +130,6 @@ static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
 	: "=&r" (result), "=&r" (temp), \
 	  "+" GCC_OFF_SMALL_ASM() (v->counter) \
 	: "Ir" (i) : __LLSC_CLOBBER); \
-	} else { \
-		unsigned long flags; \
-		\
-		raw_local_irq_save(flags); \
-		result = v->counter; \
-		v->counter c_op i; \
-		raw_local_irq_restore(flags); \
-	} \
 	\
 	return result; \
 }
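For reference, atomic.h instantiates these macros via ATOMIC_OPS(add, +=, addu) and ATOMIC_OPS(sub, -=, subu), so after this change ATOMIC_OP(add, +=, addu) expands to roughly the sketch below. This is illustrative only: the sc retry branch falls between the two hunks above and is not shown in the diff, so it appears here as a plain beqz, whereas the real macro uses a wrapper that selects beqz or beqzl depending on the CPU.

static __inline__ void atomic_add(int i, atomic_t *v)
{
	int temp;

	if (!kernel_uses_llsc) {
		/* No LL/SC available: fall back to briefly disabling IRQs. */
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
		return;
	}

	/* The LL/SC path now sits at function scope thanks to the early return. */
	loongson_llsc_mb();
	__asm__ __volatile__(
	"	.set	push \n"
	"	.set	" MIPS_ISA_LEVEL " \n"
	"1:	ll	%0, %1	# atomic_add \n"
	"	addu	%0, %2 \n"
	"	sc	%0, %1 \n"
	"	beqz	%0, 1b \n"	/* illustrative; see note above */
	"	.set	pop \n"
	: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)
	: "Ir" (i) : __LLSC_CLOBBER);
}

Note also how the two value-returning fallbacks in the diff differ: atomic_##op##_return_relaxed() applies c_op to result and returns the new value, while atomic_fetch_##op##_relaxed() applies c_op directly to v->counter and returns the value read beforehand.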