Commit 10b88270 authored by Mathieu Desnoyers, committed by Linus Torvalds

Add cmpxchg_local to blackfin, replace __cmpxchg by generic cmpxchg

Use the new generic cmpxchg_local (which disables interrupts). Also use the generic
cmpxchg as a fallback when SMP is not set, since nobody seems to know why __cmpxchg
was implemented in assembly in the first place rather than in plain C.
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Bryan Wu <bryan.wu@analog.com>
Cc: Michael Frysinger <michael.frysinger@analog.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 5e86c11d
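
For context, the generic cmpxchg_local fallback referenced above works by masking
interrupts around a plain C compare-and-store, which is atomic with respect to the
current CPU only. Below is a minimal sketch of that approach; the helper name
__cmpxchg_local_sketch and the condensed switch are illustrative, not the exact
asm-generic/cmpxchg-local.h source:

#include <linux/irqflags.h>	/* local_irq_save()/local_irq_restore() */

/* Sketch: single-CPU compare-and-exchange via IRQ masking (kernel context). */
static inline unsigned long __cmpxchg_local_sketch(volatile void *ptr,
		unsigned long old, unsigned long new, int size)
{
	unsigned long flags, prev;

	local_irq_save(flags);	/* nothing on this CPU can interrupt us now */
	switch (size) {
	case 1:
		prev = *(volatile unsigned char *)ptr;
		if (prev == old)
			*(volatile unsigned char *)ptr = (unsigned char)new;
		break;
	case 2:
		prev = *(volatile unsigned short *)ptr;
		if (prev == old)
			*(volatile unsigned short *)ptr = (unsigned short)new;
		break;
	case 4:
	default:		/* the real helper rejects bad sizes instead */
		prev = *(volatile unsigned long *)ptr;
		if (prev == old)
			*(volatile unsigned long *)ptr = new;
		break;
	}
	local_irq_restore(flags);
	return prev;		/* success iff prev == old */
}
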
@@ -183,55 +183,20 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
 	return tmp;
 }
 
+#include <asm-generic/cmpxchg-local.h>
+
 /*
- * Atomic compare and exchange.  Compare OLD with MEM, if identical,
- * store NEW in MEM.  Return the initial value in MEM.  Success is
- * indicated by comparing RETURN with OLD.
+ * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
+ * them available.
  */
+#define cmpxchg_local(ptr, o, n)					       \
+	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+			(unsigned long)(n), sizeof(*(ptr))))
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
 
-static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
-				      unsigned long new, int size)
-{
-	unsigned long tmp = 0;
-	unsigned long flags = 0;
-
-	local_irq_save(flags);
-
-	switch (size) {
-	case 1:
-		__asm__ __volatile__
-			("%0 = b%3 (z);\n\t"
-			 "CC = %1 == %0;\n\t"
-			 "IF !CC JUMP 1f;\n\t"
-			 "b%3 = %2;\n\t"
-			 "1:\n\t"
-			 : "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
-		break;
-	case 2:
-		__asm__ __volatile__
-			("%0 = w%3 (z);\n\t"
-			 "CC = %1 == %0;\n\t"
-			 "IF !CC JUMP 1f;\n\t"
-			 "w%3 = %2;\n\t"
-			 "1:\n\t"
-			 : "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
-		break;
-	case 4:
-		__asm__ __volatile__
-			("%0 = %3;\n\t"
-			 "CC = %1 == %0;\n\t"
-			 "IF !CC JUMP 1f;\n\t"
-			 "%3 = %2;\n\t"
-			 "1:\n\t"
-			 : "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
-		break;
-	}
-	local_irq_restore(flags);
-	return tmp;
-}
-
-#define cmpxchg(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
-					(unsigned long)(n),sizeof(*(ptr))))
+#ifndef CONFIG_SMP
+#include <asm-generic/cmpxchg.h>
+#endif
 
 #define prepare_to_switch()     do { } while(0)
...
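
To illustrate the calling convention the new macro preserves (success is detected
by comparing the return value with the expected old value), a hypothetical caller
might look like this; the variable and function names are made up for the example:

static int refcount;

static void take_ref_if_unused(void)
{
	/*
	 * Set refcount to 1 only if it is still 0; cmpxchg_local returns
	 * the value that was actually in memory before the attempt.
	 */
	if (cmpxchg_local(&refcount, 0, 1) == 0) {
		/* we observed 0 and installed 1 atomically on this CPU */
	}
}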