Commit bef82820 authored by Mark Rutland, committed by Ingo Molnar

atomics/treewide: Make atomic64_inc_not_zero() optional

We define a trivial fallback for atomic_inc_not_zero(), but don't do
the same for atomic64_inc_not_zero(), leading most architectures to
define the same boilerplate.

Let's add a fallback in <linux/atomic.h>, and remove the redundant
implementations. Note that atomic64_add_unless() is always defined in
<linux/atomic.h>, and promotes its arguments to the requisite types, so
we need not do this explicitly.
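
For illustration, a minimal, self-contained sketch of the pattern (userspace stand-ins, not the kernel's exact definitions): because the add-unless helper takes 64-bit parameters, the plain 1 and 0 in the fallback are promoted at the call site, which is what makes the per-architecture "1LL, 0LL" spellings removed below unnecessary.

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for the kernel's atomic64_t; illustration only, not actually atomic. */
typedef struct { long long counter; } atomic64_t;

/* Simplified model of atomic64_add_unless(): add @a to @v unless the current
 * value is @u; returns true if the add was performed. */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	if (v->counter == u)
		return false;
	v->counter += a;
	return true;
}

/* The generic fallback this patch adds: the int literals 1 and 0 are
 * promoted to long long by the prototype above. */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif

int main(void)
{
	atomic64_t live = { .counter = 3 }, dead = { .counter = 0 };

	printf("%d %d\n", (int)atomic64_inc_not_zero(&live),	/* 1: live is now 4 */
			  (int)atomic64_inc_not_zero(&dead));	/* 0: dead stays 0  */
	return 0;
}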

There should be no functional change as a result of this patch.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-6-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent ade5ef92
@@ -296,8 +296,6 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
return old - 1;
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
...
@@ -603,7 +603,6 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
#define atomic64_dec(v) atomic64_sub(1LL, (v))
#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
#endif /* !CONFIG_GENERIC_ATOMIC64 */
...
@@ -534,7 +534,6 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
#define atomic64_dec(v) atomic64_sub(1LL, (v))
#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1LL, (v))
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
#endif /* !CONFIG_GENERIC_ATOMIC64 */
#endif
...
@@ -204,7 +204,5 @@
#define atomic64_add_unless(v, a, u) (___atomic_add_unless(v, a, u, 64) != u)
#define atomic64_andnot atomic64_andnot
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#endif
#endif
@@ -246,8 +246,6 @@ static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
long c, old, dec;
...
@@ -644,8 +644,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
...
@@ -305,8 +305,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
/*
* atomic64_dec_if_positive - decrement by 1 if old value positive
* @v: pointer of type atomic_t
...
@@ -582,6 +582,7 @@ static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
return t1 != 0;
}
#define atomic64_inc_not_zero(v) atomic64_inc_not_zero((v))
#endif /* __powerpc64__ */
...
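
The powerpc hunk above (and the 32-bit x86 hunk further down) relies on a preprocessor idiom worth spelling out: an architecture that keeps its own implementation #defines the symbol to itself, so the new "#ifndef atomic64_inc_not_zero" fallback in <linux/atomic.h> is never emitted for it. A hedged, compilable sketch of the idea, using C11 atomics as a stand-in rather than the kernel's actual code:

#include <stdatomic.h>
#include <stdbool.h>

/* Stand-in atomic type for illustration; the kernel's atomic64_t differs. */
typedef struct { _Atomic long long counter; } atomic64_t;

/* An "architecture-provided" implementation: the classic compare-and-swap
 * loop that increments only while the observed value is non-zero. */
static inline bool atomic64_inc_not_zero(atomic64_t *v)
{
	long long old = atomic_load(&v->counter);

	while (old != 0) {
		if (atomic_compare_exchange_weak(&v->counter, &old, old + 1))
			return true;
		/* On failure, old was refreshed with the current value; retry. */
	}
	return false;
}

/* Advertise the native version: with the name #defined, the generic
 * "#ifndef" fallback in <linux/atomic.h> is skipped for this architecture. */
#define atomic64_inc_not_zero(v) atomic64_inc_not_zero((v))
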
@@ -375,13 +375,6 @@ static __always_inline int atomic64_add_unless(atomic64_t *v, long a, long u)
}
#endif
#ifndef CONFIG_GENERIC_ATOMIC64
static __always_inline long atomic64_inc_not_zero(atomic64_t *v)
{
return atomic64_add_unless(v, 1, 0);
}
#endif
/*
* atomic_{cmp,}xchg is required to have exactly the same ordering semantics as
* {cmp,}xchg and the operations that return, so they need a full barrier.
...
@@ -212,6 +212,5 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
#define atomic64_dec(_v) atomic64_sub(1, _v)
#define atomic64_dec_return(_v) atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v) (atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#endif /* __ARCH_S390_ATOMIC__ */
@@ -123,8 +123,6 @@ static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
long atomic64_dec_if_positive(atomic64_t *v);
#endif /* !(__ARCH_SPARC64_ATOMIC__) */
@@ -295,7 +295,7 @@ static inline int arch_atomic64_add_unless(atomic64_t *v, long long a,
return (int)a;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
int r;
...
@@ -207,8 +207,6 @@ static inline bool arch_atomic64_add_unless(atomic64_t *v, long a, long u)
return true;
}
#define arch_atomic64_inc_not_zero(v) arch_atomic64_add_unless((v), 1, 0)
/*
* arch_atomic64_dec_if_positive - decrement by 1 if old value positive
* @v: pointer of type atomic_t
...
@@ -205,11 +205,14 @@ static __always_inline s64 atomic64_dec_return(atomic64_t *v)
return arch_atomic64_dec_return(v);
}
#ifdef arch_atomic64_inc_not_zero
#define atomic64_inc_not_zero atomic64_inc_not_zero
static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_inc_not_zero(v);
}
#endif
static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
...
@@ -63,6 +63,5 @@ extern bool atomic64_add_unless(atomic64_t *v, long long a, long long u);
#define atomic64_dec(v) atomic64_sub(1LL, (v))
#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
#endif /* _ASM_GENERIC_ATOMIC64_H */
@@ -1019,6 +1019,17 @@ static inline int atomic_dec_if_positive(atomic_t *v)
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
/**
* atomic64_inc_not_zero - increment unless the number is zero
* @v: pointer of type atomic64_t
*
* Atomically increments @v by 1, if @v is non-zero.
* Returns true if the increment was done.
*/
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#endif
#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
...
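
For context, a hypothetical caller (names invented for illustration, not part of this patch) of the kind the helper exists for: taking a reference only while an object's 64-bit refcount is still non-zero. After this change the call behaves the same whether the architecture supplies a native atomic64_inc_not_zero() or picks up the generic fallback added above.

#include <linux/atomic.h>
#include <linux/types.h>

/* Hypothetical example structure, not from this patch. */
struct obj {
	atomic64_t refcount;
};

/* Try to take a reference: succeeds only if the object is still live,
 * i.e. its refcount has not already dropped to zero. */
static bool obj_tryget(struct obj *o)
{
	return atomic64_inc_not_zero(&o->refcount);
}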