Commit 6988631b authored by Mark Rutland, committed by Peter Zijlstra

locking/atomic: cmpxchg: make `generic` a prefix

The asm-generic implementations of cmpxchg_local() and cmpxchg64_local()
use a `_generic` suffix to distinguish themselves from arch code or
wrappers used elsewhere.

Subsequent patches will add ARCH_ATOMIC support to these
implementations, and will distinguish more functions with a `generic`
portion. To align with how ARCH_ATOMIC uses an `arch_` prefix, it would
be helpful to use a `generic_` prefix rather than a `_generic` suffix.

In preparation for this, this patch renames the existing functions to
make `generic` a prefix rather than a suffix. There should be no
functional change as a result of this patch.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Will Deacon <will@kernel.org>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lore.kernel.org/r/20210525140232.53872-12-mark.rutland@arm.com
parent 1bdadf46
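
Before the per-architecture hunks, a minimal, self-contained sketch of the pattern being renamed may help: an arch's cmpxchg_local() macro forwards to the common fallback helper, which after this patch carries a generic_ prefix instead of a _generic suffix. This is illustrative userspace C, not kernel source; the body is a single-threaded simplification that ignores interrupt masking and operand size.

#include <stdio.h>

/* Renamed helper: was __cmpxchg_local_generic(), now __generic_cmpxchg_local().
 * Simplified model: always operates on a full unsigned long and ignores
 * `size`; the real kernel helper switches on size and masks interrupts. */
static inline unsigned long
__generic_cmpxchg_local(volatile void *ptr, unsigned long old,
			unsigned long new, int size)
{
	unsigned long prev = *(unsigned long *)ptr;

	(void)size;
	if (prev == old)
		*(unsigned long *)ptr = new;
	return prev;	/* caller compares prev with old to detect success */
}

/* Arch-style wrapper, as in the hunks below: cast through the pointee type. */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr),		\
			(unsigned long)(o), (unsigned long)(n),		\
			sizeof(*(ptr))))

int main(void)
{
	unsigned long v = 1;
	unsigned long prev = cmpxchg_local(&v, 1UL, 2UL);

	printf("prev=%lu now=%lu\n", prev, v);	/* prev=1 now=2 */
	return 0;
}
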
arch/arm/include/asm/cmpxchg.h
@@ -135,13 +135,13 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
  * them available.
  */
 #define cmpxchg_local(ptr, o, n) ({					\
-	(__typeof(*ptr))__cmpxchg_local_generic((ptr),			\
+	(__typeof(*ptr))__generic_cmpxchg_local((ptr),			\
						(unsigned long)(o),	\
						(unsigned long)(n),	\
						sizeof(*(ptr)));	\
 })
 
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 
 #include <asm-generic/cmpxchg.h>
@@ -224,7 +224,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 #ifdef CONFIG_CPU_V6	/* min ARCH == ARMv6 */
 	case 1:
 	case 2:
-		ret = __cmpxchg_local_generic(ptr, old, new, size);
+		ret = __generic_cmpxchg_local(ptr, old, new, size);
 		break;
 #endif
 	default:
arch/m68k/include/asm/cmpxchg.h
@@ -80,7 +80,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size
 #include <asm-generic/cmpxchg-local.h>
 
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 
 extern unsigned long __invalid_cmpxchg_size(volatile void *,
 					    unsigned long, unsigned long, int);
arch/mips/include/asm/cmpxchg.h
@@ -222,7 +222,7 @@ unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 #else
 # include <asm-generic/cmpxchg-local.h>
-# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+# define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 
 # ifdef CONFIG_SMP
arch/parisc/include/asm/cmpxchg.h
@@ -98,7 +98,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 #endif
 	case 4:	return __cmpxchg_u32(ptr, old, new_);
 	default:
-		return __cmpxchg_local_generic(ptr, old, new_, size);
+		return __generic_cmpxchg_local(ptr, old, new_, size);
 	}
 }
@@ -116,7 +116,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 	cmpxchg_local((ptr), (o), (n));					\
 })
 #else
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 #endif
 
 #define cmpxchg64(ptr, o, n) __cmpxchg_u64(ptr, o, n)
arch/powerpc/include/asm/cmpxchg.h
@@ -524,7 +524,7 @@ __cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
 })
 #else
 #include <asm-generic/cmpxchg-local.h>
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 #endif
 
 #endif /* __KERNEL__ */
arch/sparc/include/asm/cmpxchg_32.h
@@ -73,8 +73,8 @@ u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new);
  * them available.
  */
 #define cmpxchg_local(ptr, o, n)					       \
-	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),\
 			(unsigned long)(n), sizeof(*(ptr))))
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 
 #endif /* __ARCH_SPARC_CMPXCHG__ */
arch/sparc/include/asm/cmpxchg_64.h
@@ -189,7 +189,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 	case 4:
 	case 8:	return __cmpxchg(ptr, old, new, size);
 	default:
-		return __cmpxchg_local_generic(ptr, old, new, size);
+		return __generic_cmpxchg_local(ptr, old, new, size);
 	}
 
 	return old;
arch/xtensa/include/asm/cmpxchg.h
@@ -97,7 +97,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 	case 4:
 		return __cmpxchg_u32(ptr, old, new);
 	default:
-		return __cmpxchg_local_generic(ptr, old, new, size);
+		return __generic_cmpxchg_local(ptr, old, new, size);
 	}
 
 	return old;
@@ -108,9 +108,9 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
  * them available.
  */
 #define cmpxchg_local(ptr, o, n)					       \
-	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),\
 			(unsigned long)(n), sizeof(*(ptr))))
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 #define cmpxchg64(ptr, o, n)    cmpxchg64_local((ptr), (o), (n))
 
 /*
include/asm-generic/cmpxchg-local.h
@@ -12,7 +12,7 @@ extern unsigned long wrong_size_cmpxchg(volatile void *ptr)
  * Generic version of __cmpxchg_local (disables interrupts). Takes an unsigned
  * long parameter, supporting various types of architectures.
  */
-static inline unsigned long __cmpxchg_local_generic(volatile void *ptr,
+static inline unsigned long __generic_cmpxchg_local(volatile void *ptr,
 		unsigned long old, unsigned long new, int size)
 {
 	unsigned long flags, prev;
@@ -51,7 +51,7 @@ static inline unsigned long __cmpxchg_local_generic(volatile void *ptr,
 /*
  * Generic version of __cmpxchg64_local. Takes an u64 parameter.
  */
-static inline u64 __cmpxchg64_local_generic(volatile void *ptr,
+static inline u64 __generic_cmpxchg64_local(volatile void *ptr,
 		u64 old, u64 new)
 {
 	u64 prev;
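
The helper bodies fall outside the hunk context above, but the header comment describes the technique: perform the compare-and-store with interrupts disabled, so the sequence is atomic with respect to the local CPU, which is all cmpxchg_local() guarantees. Below is a hedged sketch of that approach, with raw_local_irq_save()/raw_local_irq_restore() stubbed as no-ops so it compiles outside the kernel; it mirrors the shape of the asm-generic code but is not a verbatim copy.

#include <stdint.h>

typedef uint8_t  u8;
typedef uint16_t u16;
typedef uint32_t u32;

/* Userspace stubs; in the kernel these really mask local interrupts. */
#define raw_local_irq_save(flags)	((void)((flags) = 0UL))
#define raw_local_irq_restore(flags)	((void)(flags))

static inline unsigned long __generic_cmpxchg_local(volatile void *ptr,
		unsigned long old, unsigned long new, int size)
{
	unsigned long flags, prev;

	/* With interrupts off, read/compare/write cannot be interleaved
	 * with other work on this CPU. */
	raw_local_irq_save(flags);
	switch (size) {
	case 1:	prev = *(u8 *)ptr;
		if (prev == old)
			*(u8 *)ptr = (u8)new;
		break;
	case 2:	prev = *(u16 *)ptr;
		if (prev == old)
			*(u16 *)ptr = (u16)new;
		break;
	case 4:	prev = *(u32 *)ptr;
		if (prev == old)
			*(u32 *)ptr = (u32)new;
		break;
	default:
		/* The kernel diverts bad sizes to a link-time error via
		 * wrong_size_cmpxchg(); return old as a placeholder. */
		prev = old;
		break;
	}
	raw_local_irq_restore(flags);
	return prev;
}
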
include/asm-generic/cmpxchg.h
@@ -94,13 +94,13 @@ unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 #ifndef cmpxchg_local
 #define cmpxchg_local(ptr, o, n) ({					       \
-	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),\
 	 (unsigned long)(n), sizeof(*(ptr))));				       \
 })
 #endif
 
 #ifndef cmpxchg64_local
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 #endif
 
 #define cmpxchg(ptr, o, n)	cmpxchg_local((ptr), (o), (n))