Commit 3cdbc341 authored by Richard Henderson

From Ivan Kokshaysky <ink@jurassic.park.msu.ru>:

Implement alpha xchg and cmpxchg for 8- and 16-bit quantities.
parent f075e6af
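
Not part of the patch, just orientation: with these hunks applied, the generic xchg() and cmpxchg() wrappers in this header dispatch on sizeof(*ptr), so Alpha callers can use them on 1- and 2-byte objects as well as the existing 4- and 8-byte ones. A minimal caller-side sketch (the variable and function names below are invented for illustration):

/* Illustration only; not from the commit.  Names are invented. */
#include <asm/system.h>		/* xchg(), cmpxchg() */

static unsigned char  byte_lock;
static unsigned short seq = 41;

static void example_sub_word_atomics(void)
{
	unsigned char  old_byte;
	unsigned short old_seq;

	old_byte = xchg(&byte_lock, 1);		/* sizeof == 1: dispatches to __xchg_u8()     */
	old_seq  = cmpxchg(&seq, 41, 42);	/* sizeof == 2: dispatches to __cmpxchg_u16() */

	if (old_byte == 0) {
		/* we were the one that set byte_lock */
	}
	if (old_seq == 41) {
		/* the 16-bit compare-and-exchange installed 42 */
	}
}
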
@@ -366,7 +366,59 @@ extern void __global_restore_flags(unsigned long flags);
  * it must clobber "memory" (also for interrupts in UP).
  */
 
-extern __inline__ unsigned long
+static inline unsigned long
+__xchg_u8(volatile char *m, unsigned long val)
+{
+	unsigned long ret, tmp, addr64;
+
+	__asm__ __volatile__(
+	"	andnot	%4,7,%3\n"
+	"	insbl	%1,%4,%1\n"
+	"1:	ldq_l	%2,0(%3)\n"
+	"	extbl	%2,%4,%0\n"
+	"	mskbl	%2,%4,%2\n"
+	"	or	%1,%2,%2\n"
+	"	stq_c	%2,0(%3)\n"
+	"	beq	%2,2f\n"
+#ifdef CONFIG_SMP
+	"	mb\n"
+#endif
+	".subsection 2\n"
+	"2:	br	1b\n"
+	".previous"
+	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
+	: "r" ((long)m), "1" (val) : "memory");
+
+	return ret;
+}
+
+static inline unsigned long
+__xchg_u16(volatile short *m, unsigned long val)
+{
+	unsigned long ret, tmp, addr64;
+
+	__asm__ __volatile__(
+	"	andnot	%4,7,%3\n"
+	"	inswl	%1,%4,%1\n"
+	"1:	ldq_l	%2,0(%3)\n"
+	"	extwl	%2,%4,%0\n"
+	"	mskwl	%2,%4,%2\n"
+	"	or	%1,%2,%2\n"
+	"	stq_c	%2,0(%3)\n"
+	"	beq	%2,2f\n"
+#ifdef CONFIG_SMP
+	"	mb\n"
+#endif
+	".subsection 2\n"
+	"2:	br	1b\n"
+	".previous"
+	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
+	: "r" ((long)m), "1" (val) : "memory");
+
+	return ret;
+}
+
+static inline unsigned long
 __xchg_u32(volatile int *m, unsigned long val)
 {
 	unsigned long dummy;
@@ -388,7 +440,7 @@ __xchg_u32(volatile int *m, unsigned long val)
 	return val;
 }
 
-extern __inline__ unsigned long
+static inline unsigned long
 __xchg_u64(volatile long *m, unsigned long val)
 {
 	unsigned long dummy;
@@ -414,10 +466,14 @@ __xchg_u64(volatile long *m, unsigned long val)
    if something tries to do an invalid xchg(). */
 extern void __xchg_called_with_bad_pointer(void);
 
-static __inline__ unsigned long
+static inline unsigned long
 __xchg(volatile void *ptr, unsigned long x, int size)
 {
 	switch (size) {
+		case 1:
+			return __xchg_u8(ptr, x);
+		case 2:
+			return __xchg_u16(ptr, x);
 		case 4:
 			return __xchg_u32(ptr, x);
 		case 8:
@@ -449,7 +505,65 @@ __xchg(volatile void *ptr, unsigned long x, int size)
 
 #define __HAVE_ARCH_CMPXCHG 1
 
-extern __inline__ unsigned long
+static inline unsigned long
+__cmpxchg_u8(volatile char *m, long old, long new)
+{
+	unsigned long prev, tmp, cmp, addr64;
+
+	__asm__ __volatile__(
+	"	andnot	%5,7,%4\n"
+	"	insbl	%1,%5,%1\n"
+	"1:	ldq_l	%2,0(%4)\n"
+	"	extbl	%2,%5,%0\n"
+	"	cmpeq	%0,%6,%3\n"
+	"	beq	%3,2f\n"
+	"	mskbl	%2,%5,%2\n"
+	"	or	%1,%2,%2\n"
+	"	stq_c	%2,0(%4)\n"
+	"	beq	%2,3f\n"
+#ifdef CONFIG_SMP
+	"	mb\n"
+#endif
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
+	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
+
+	return prev;
+}
+
+static inline unsigned long
+__cmpxchg_u16(volatile short *m, long old, long new)
+{
+	unsigned long prev, tmp, cmp, addr64;
+
+	__asm__ __volatile__(
+	"	andnot	%5,7,%4\n"
+	"	inswl	%1,%5,%1\n"
+	"1:	ldq_l	%2,0(%4)\n"
+	"	extwl	%2,%5,%0\n"
+	"	cmpeq	%0,%6,%3\n"
+	"	beq	%3,2f\n"
+	"	mskwl	%2,%5,%2\n"
+	"	or	%1,%2,%2\n"
+	"	stq_c	%2,0(%4)\n"
+	"	beq	%2,3f\n"
+#ifdef CONFIG_SMP
+	"	mb\n"
+#endif
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
+	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
+
+	return prev;
+}
+
+static inline unsigned long
 __cmpxchg_u32(volatile int *m, int old, int new)
 {
 	unsigned long prev, cmp;
@@ -474,7 +588,7 @@ __cmpxchg_u32(volatile int *m, int old, int new)
 	return prev;
 }
 
-extern __inline__ unsigned long
+static inline unsigned long
 __cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
 {
 	unsigned long prev, cmp;
@@ -503,10 +617,14 @@ __cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
    if something tries to do an invalid cmpxchg(). */
 extern void __cmpxchg_called_with_bad_pointer(void);
 
-static __inline__ unsigned long
+static inline unsigned long
 __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
 {
 	switch (size) {
+		case 1:
+			return __cmpxchg_u8(ptr, old, new);
+		case 2:
+			return __cmpxchg_u16(ptr, old, new);
 		case 4:
 			return __cmpxchg_u32(ptr, old, new);
 		case 8:
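
A note on the technique (not from the commit itself): ldq_l/stq_c on Alpha operate only on aligned 32- and 64-bit quantities, so the new 8- and 16-bit variants work on the aligned quadword containing the target and insert, mask, and merge the sub-word value inside it. Roughly, in non-atomic pseudo-C (all names below are invented; the real code keeps the whole read-modify-write inside one load-locked/store-conditional sequence and retries via the branch back to label 1):

/* Explanatory, non-atomic rendering of the __xchg_u8() sequence above.
 * store_conditional() is an invented stand-in for stq_c; here it is a plain
 * store that always "succeeds", which is exactly why this sketch is NOT
 * atomic. */
static int store_conditional(volatile unsigned long *p, unsigned long v)
{
	*p = v;
	return 1;
}

static unsigned long pseudo_xchg_u8(volatile char *m, unsigned long val)
{
	unsigned long addr64 = (unsigned long)m & ~7UL;		/* andnot: aligned quadword    */
	unsigned long shift  = ((unsigned long)m & 7) * 8;	/* byte lane within it         */
	unsigned long ins    = (val & 0xff) << shift;		/* insbl: new byte into place  */
	unsigned long quad, ret;

	do {
		quad = *(volatile unsigned long *)addr64;	/* ldq_l: load-locked          */
		ret  = (quad >> shift) & 0xff;			/* extbl: old byte, returned   */
		quad &= ~(0xffUL << shift);			/* mskbl: clear the byte lane  */
		quad |= ins;					/* or: merge in the new byte   */
	} while (!store_conditional((volatile unsigned long *)addr64, quad));	/* stq_c fails -> br 1b */

	return ret;
}

The __cmpxchg_u8()/__cmpxchg_u16() versions differ only in that they compare the extracted old value against the expected one (cmpeq/beq) and skip the store entirely when the comparison fails, returning the current value unchanged; the 16-bit variants use inswl/extwl/mskwl on a halfword lane instead of the byte forms.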