Commit 29b8e53c authored by Al Viro, committed by Paul E. McKenney

parisc: __cmpxchg_u32(): lift conversion into the callers

__cmpxchg_u32()'s return value is an unsigned int explicitly cast to
unsigned long.  Both callers immediately return it from functions that
themselves return unsigned long; we might as well have __cmpxchg_u32()
return that unsigned int (aka u32) and let the callers do the
conversion implicitly.
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Signed-off-by: Paul E. McKenney <paulmck@kernel.org>
parent dbc93fdc
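
For background, the conversion being lifted is the implicit integer
conversion C applies on return: a u32 value returned from a function
declared to return unsigned long is widened automatically, so an
explicit cast in the callee buys nothing. A minimal standalone sketch
of that rule, with hypothetical names (not kernel code):

	#include <stdint.h>

	/* Narrow helper: returns the value as a plain 32-bit type. */
	static uint32_t load_u32(volatile uint32_t *p)
	{
		return *p;
	}

	/* Wide caller: the uint32_t result converts to unsigned long
	 * implicitly on return; no cast is needed in either function. */
	unsigned long load_wide(volatile uint32_t *p)
	{
		return load_u32(p);
	}
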
arch/parisc/include/asm/cmpxchg.h
@@ -57,8 +57,7 @@ __arch_xchg(unsigned long x, volatile void *ptr, int size)
 extern void __cmpxchg_called_with_bad_pointer(void);
 
 /* __cmpxchg_u32/u64 defined in arch/parisc/lib/bitops.c */
-extern unsigned long __cmpxchg_u32(volatile unsigned int *m, unsigned int old,
-				   unsigned int new_);
+extern u32 __cmpxchg_u32(volatile u32 *m, u32 old, u32 new_);
 extern u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new_);
 extern u8 __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new_);
arch/parisc/lib/bitops.c
@@ -68,16 +68,16 @@ u64 notrace __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new)
 	return prev;
 }
 
-unsigned long notrace __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
+u32 notrace __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
 {
 	unsigned long flags;
-	unsigned int prev;
+	u32 prev;
 
 	_atomic_spin_lock_irqsave(ptr, flags);
 	if ((prev = *ptr) == old)
 		*ptr = new;
 	_atomic_spin_unlock_irqrestore(ptr, flags);
-	return (unsigned long)prev;
+	return prev;
 }
 
 u8 notrace __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new)
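
The callers referred to in the commit message are the size-dispatch
wrappers in the cmpxchg header, which are declared to return unsigned
long for every operand width. A sketch of that shape (simplified and
paraphrased from the usual cmpxchg pattern; details may differ from
the real parisc header):

	/* Hypothetical sketch of the dispatching caller, not verbatim
	 * kernel source. */
	static inline unsigned long
	__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_,
		  int size)
	{
		switch (size) {
		case 8:
			return __cmpxchg_u64((volatile u64 *)ptr, old, new_);
		case 4:
			/* __cmpxchg_u32() now returns u32; the widening to
			 * unsigned long happens implicitly at this return. */
			return __cmpxchg_u32((volatile u32 *)ptr, old, new_);
		case 1:
			return __cmpxchg_u8((volatile u8 *)ptr, old, new_);
		}
		__cmpxchg_called_with_bad_pointer();
		return old;
	}

With the old signature, __cmpxchg_u32() performed the widening itself
via an explicit cast; after this commit the cast disappears and each
return statement above does the same conversion for free.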