Commit 7cc7eaad authored by Mark Rutland, committed by Ingo Molnar

atomics/treewide: Clean up '*_andnot()' ifdeffery

The ifdeffery for atomic*_{fetch_,}andnot() is unlike that for all the
other atomics. If atomic*_andnot() is not defined, the corresponding
atomic*_fetch_andnot() is assumed to not be defined.

Additionally, the fallbacks for the various ordering cases are written
much later in atomic.h as static inlines.

This isn't problematic today, but gets in the way of scripting the
generation of atomics. To prepare for scripting, this patch:

* Switches to separate ifdefs for atomic*_andnot() and
  atomic*_fetch_andnot(), updating implementations as appropriate.

* Moves the fallbacks into the standard ifdefs, as macro expansions
  rather than static inlines.

* Removes trivial andnot implementations from architectures, where these
  are superseded by core code.

There should be no functional change as a result of this patch.
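
In outline, the guard structure changes as follows (a condensed sketch of
the atomic.h hunks below, 32-bit variants only; the atomic64 cases are
analogous):

	/* Before: one guard keyed off atomic_andnot(); the
	 * atomic_fetch_andnot*() fallbacks only existed when
	 * atomic_andnot() was defined. */
	#ifdef atomic_andnot
	/* ... atomic_fetch_andnot*() ordering fallbacks ... */
	#endif /* atomic_andnot */

	/* After: separate guards per operation, with the fallbacks as
	 * macro expansions in terms of the and()/fetch_and() family. */
	#ifndef atomic_andnot
	#define atomic_andnot(i, v)		atomic_and(~(int)(i), (v))
	#endif

	#ifndef atomic_fetch_andnot
	#define atomic_fetch_andnot(i, v)	atomic_fetch_and(~(int)(i), (v))
	#endif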
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-19-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent b3a2a05f
@@ -188,6 +188,7 @@ ATOMIC_OPS(add, +=, add)
 ATOMIC_OPS(sub, -=, sub)
 #define atomic_andnot atomic_andnot
+#define atomic_fetch_andnot atomic_fetch_andnot
 #undef ATOMIC_OPS
 #define ATOMIC_OPS(op, c_op, asm_op)					\
@@ -296,8 +297,6 @@ ATOMIC_OPS(add, +=, CTOP_INST_AADD_DI_R2_R2_R3)
 	ATOMIC_FETCH_OP(op, c_op, asm_op)
 ATOMIC_OPS(and, &=, CTOP_INST_AAND_DI_R2_R2_R3)
-#define atomic_andnot(mask, v) atomic_and(~(mask), (v))
-#define atomic_fetch_andnot(mask, v) atomic_fetch_and(~(mask), (v))
 ATOMIC_OPS(or, |=, CTOP_INST_AOR_DI_R2_R2_R3)
 ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
@@ -431,6 +430,7 @@ static inline long long atomic64_fetch_##op(long long a, atomic64_t *v)	\
 	ATOMIC64_FETCH_OP(op, op1, op2)
 #define atomic64_andnot atomic64_andnot
+#define atomic64_fetch_andnot atomic64_fetch_andnot
 ATOMIC64_OPS(add, add.f, adc)
 ATOMIC64_OPS(sub, sub.f, sbc)
...
@@ -216,6 +216,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
+#define atomic_fetch_andnot atomic_fetch_andnot
 #endif /* __LINUX_ARM_ARCH__ */
 #define ATOMIC_OPS(op, c_op, asm_op)					\
...
@@ -354,12 +354,22 @@
 #endif
 #endif /* atomic_fetch_and_relaxed */
-#ifdef atomic_andnot
-/* atomic_fetch_andnot_relaxed */
+#ifndef atomic_andnot
+#define atomic_andnot(i, v)			atomic_and(~(int)(i), (v))
+#endif
 #ifndef atomic_fetch_andnot_relaxed
+#ifndef atomic_fetch_andnot
+#define atomic_fetch_andnot(i, v)		atomic_fetch_and(~(int)(i), (v))
+#define atomic_fetch_andnot_relaxed(i, v)	atomic_fetch_and_relaxed(~(int)(i), (v))
+#define atomic_fetch_andnot_acquire(i, v)	atomic_fetch_and_acquire(~(int)(i), (v))
+#define atomic_fetch_andnot_release(i, v)	atomic_fetch_and_release(~(int)(i), (v))
+#else /* atomic_fetch_andnot */
 #define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
 #define atomic_fetch_andnot_acquire	atomic_fetch_andnot
 #define atomic_fetch_andnot_release	atomic_fetch_andnot
+#endif /* atomic_fetch_andnot */
 #else /* atomic_fetch_andnot_relaxed */
@@ -378,7 +388,6 @@
 	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
 #endif
 #endif /* atomic_fetch_andnot_relaxed */
-#endif /* atomic_andnot */
 /* atomic_fetch_xor_relaxed */
 #ifndef atomic_fetch_xor_relaxed
@@ -655,33 +664,6 @@ static inline bool atomic_add_negative(int i, atomic_t *v)
 }
 #endif
-#ifndef atomic_andnot
-static inline void atomic_andnot(int i, atomic_t *v)
-{
-	atomic_and(~i, v);
-}
-static inline int atomic_fetch_andnot(int i, atomic_t *v)
-{
-	return atomic_fetch_and(~i, v);
-}
-static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
-{
-	return atomic_fetch_and_relaxed(~i, v);
-}
-static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
-{
-	return atomic_fetch_and_acquire(~i, v);
-}
-static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
-{
-	return atomic_fetch_and_release(~i, v);
-}
-#endif
 #ifndef atomic_inc_unless_negative
 static inline bool atomic_inc_unless_negative(atomic_t *v)
 {
@@ -1029,12 +1011,22 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_fetch_and_relaxed */
-#ifdef atomic64_andnot
-/* atomic64_fetch_andnot_relaxed */
+#ifndef atomic64_andnot
+#define atomic64_andnot(i, v)			atomic64_and(~(long long)(i), (v))
+#endif
 #ifndef atomic64_fetch_andnot_relaxed
+#ifndef atomic64_fetch_andnot
+#define atomic64_fetch_andnot(i, v)		atomic64_fetch_and(~(long long)(i), (v))
+#define atomic64_fetch_andnot_relaxed(i, v)	atomic64_fetch_and_relaxed(~(long long)(i), (v))
+#define atomic64_fetch_andnot_acquire(i, v)	atomic64_fetch_and_acquire(~(long long)(i), (v))
+#define atomic64_fetch_andnot_release(i, v)	atomic64_fetch_and_release(~(long long)(i), (v))
+#else /* atomic64_fetch_andnot */
 #define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
 #define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
 #define atomic64_fetch_andnot_release	atomic64_fetch_andnot
+#endif /* atomic64_fetch_andnot */
 #else /* atomic64_fetch_andnot_relaxed */
@@ -1053,7 +1045,6 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
 #endif
 #endif /* atomic64_fetch_andnot_relaxed */
-#endif /* atomic64_andnot */
 /* atomic64_fetch_xor_relaxed */
 #ifndef atomic64_fetch_xor_relaxed
@@ -1262,33 +1253,6 @@ static inline bool atomic64_add_negative(long long i, atomic64_t *v)
 }
 #endif
-#ifndef atomic64_andnot
-static inline void atomic64_andnot(long long i, atomic64_t *v)
-{
-	atomic64_and(~i, v);
-}
-static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
-{
-	return atomic64_fetch_and(~i, v);
-}
-static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
-{
-	return atomic64_fetch_and_relaxed(~i, v);
-}
-static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
-{
-	return atomic64_fetch_and_acquire(~i, v);
-}
-static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
-{
-	return atomic64_fetch_and_release(~i, v);
-}
-#endif
 #ifndef atomic64_inc_unless_negative
 static inline bool atomic64_inc_unless_negative(atomic64_t *v)
 {
...
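
A note on the opt-in convention the new guards rely on (a sketch; the body
below is illustrative, not any architecture's real implementation): an
architecture that implements an operation also defines a macro with the same
name, so the generic header's #ifndef fallback is compiled out:

	/* In an arch's asm/atomic.h: implement the operation ... */
	static inline int atomic_fetch_andnot(int i, atomic_t *v)
	{
		return atomic_fetch_and(~i, v);	/* placeholder body */
	}
	/* ... then flag it so <linux/atomic.h> keeps this version. */
	#define atomic_fetch_andnot atomic_fetch_andnot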