Commit 9560782f authored by Russell King, committed by Russell King

[PATCH] ARM SMP: Use exclusive load/store for __xchg

Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
parent 6b6a93c6
@@ -323,12 +323,8 @@ do { \
  * NOTE that this solution won't work on an SMP system, so explcitly
  * forbid it here.
  */
-#ifdef CONFIG_SMP
-#error SMP is not supported on SA1100/SA110
-#else
 #define swp_is_buggy
 #endif
-#endif
 
 static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 {
@@ -337,9 +333,36 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 #ifdef swp_is_buggy
 	unsigned long flags;
 #endif
+#if __LINUX_ARM_ARCH__ >= 6
+	unsigned int tmp;
+#endif
 
 	switch (size) {
-#ifdef swp_is_buggy
+#if __LINUX_ARM_ARCH__ >= 6
+	case 1:
+		asm volatile("@	__xchg1\n"
+		"1:	ldrexb	%0, [%3]\n"
+		"	strexb	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+	case 4:
+		asm volatile("@	__xchg4\n"
+		"1:	ldrex	%0, [%3]\n"
+		"	strex	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+#elif defined(swp_is_buggy)
+#ifdef CONFIG_SMP
+#error SMP is not supported on this platform
+#endif
 	case 1:
 		local_irq_save(flags);
 		ret = *(volatile unsigned char *)ptr;
@@ -354,18 +377,24 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 		local_irq_restore(flags);
 		break;
 #else
-	case 1:	__asm__ __volatile__ ("swpb %0, %1, [%2]"
+	case 1:
+		asm volatile("@	__xchg1\n"
+		"	swpb	%0, %1, [%2]"
 			: "=&r" (ret)
 			: "r" (x), "r" (ptr)
 			: "memory", "cc");
 		break;
-	case 4:	__asm__ __volatile__ ("swp %0, %1, [%2]"
+	case 4:
+		asm volatile("@	__xchg4\n"
+		"	swp	%0, %1, [%2]"
 			: "=&r" (ret)
 			: "r" (x), "r" (ptr)
 			: "memory", "cc");
 		break;
 #endif
-	default: __bad_xchg(ptr, size), ret = 0;
+	default:
+		__bad_xchg(ptr, size), ret = 0;
+		break;
 	}
 
 	return ret;
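For context, the exclusive-access pattern the patch adopts works as follows: ldrex loads a value and sets a reservation on the address in the exclusive monitor; strex stores only if that reservation still holds, and writes 0 (success) or 1 (failure) into a status register. Below is a minimal, self-contained sketch of the word-sized loop, assuming a GCC-style toolchain targeting ARMv6 or later; the name xchg4_sketch is hypothetical and is not part of this patch, whose real entry point is __xchg() above.

/*
 * Minimal sketch of the ldrex/strex exchange loop, assuming GCC inline
 * asm on ARMv6+. xchg4_sketch is a hypothetical illustration name.
 */
static inline unsigned long xchg4_sketch(unsigned long x,
					 volatile unsigned long *ptr)
{
	unsigned long ret, tmp;

	asm volatile(
	"1:	ldrex	%0, [%3]\n"	/* load *ptr, set exclusive reservation */
	"	strex	%1, %2, [%3]\n"	/* try to store x; tmp = 0 on success   */
	"	teq	%1, #0\n"	/* did we lose the reservation?         */
	"	bne	1b"		/* yes: reload and retry                */
		: "=&r" (ret), "=&r" (tmp)
		: "r" (x), "r" (ptr)
		: "memory", "cc");

	return ret;
}

Unlike swp, which performs a locked read-modify-write on the bus, a failed strex simply causes a retry, and the sequence remains correct when another CPU touches the location between the load and the store. That is why this works on SMP, whereas the interrupt-disabling swp_is_buggy fallback only protects against the local CPU and is explicitly forbidden there.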