Commit b6e25929 authored by Rohan McLure, committed by Michael Ellerman

powerpc/kcsan: Memory barriers semantics

Annotate the memory barriers *mb() with calls to kcsan_mb(), signalling to
compilers supporting KCSAN that the respective memory barrier has been
issued. Rename the memory barriers *mb() to __*mb() to opt in to
asm-generic/barrier.h generating the respective *mb() macros.
Signed-off-by: Rohan McLure <rmclure@linux.ibm.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20230206021801.105268-4-rmclure@linux.ibm.com
parent 2a7ce82d
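
For context, the rename works because asm-generic/barrier.h only generates the instrumented *mb() macros when the architecture supplies the double-underscore variants. A minimal sketch of that generic wrapper (simplified from include/asm-generic/barrier.h; the neighbouring rmb()/wmb() cases follow the same shape):

/*
 * If the arch provides __mb(), emit an mb() that first tells KCSAN a
 * full barrier was issued, then executes the arch barrier itself.
 * kcsan_mb() compiles to a no-op when KCSAN is disabled.
 */
#ifdef __mb
#define mb()	do { kcsan_mb(); __mb(); } while (0)
#endif

So after this patch, mb() on powerpc expands to the KCSAN annotation followed by the sync instruction defined below.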
@@ -35,9 +35,9 @@
  * However, on CPUs that don't support lwsync, lwsync actually maps to a
  * heavy-weight sync, so smp_wmb() can be a lighter-weight eieio.
  */
-#define mb()   __asm__ __volatile__ ("sync" : : : "memory")
-#define rmb()  __asm__ __volatile__ ("sync" : : : "memory")
-#define wmb()  __asm__ __volatile__ ("sync" : : : "memory")
+#define __mb()   __asm__ __volatile__ ("sync" : : : "memory")
+#define __rmb()  __asm__ __volatile__ ("sync" : : : "memory")
+#define __wmb()  __asm__ __volatile__ ("sync" : : : "memory")
 
 /* The sub-arch has lwsync */
 #if defined(CONFIG_PPC64) || defined(CONFIG_PPC_E500MC)
@@ -51,12 +51,12 @@
 /* clang defines this macro for a builtin, which will not work with runtime patching */
 #undef __lwsync
 #define __lwsync() __asm__ __volatile__ (stringify_in_c(LWSYNC) : : :"memory")
-#define dma_rmb() __lwsync()
-#define dma_wmb() __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
+#define __dma_rmb() __lwsync()
+#define __dma_wmb() __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
 
 #define __smp_lwsync() __lwsync()
-#define __smp_mb() mb()
+#define __smp_mb() __mb()
 #define __smp_rmb() __lwsync()
 #define __smp_wmb() __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
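
The SMP and DMA variants renamed above are wrapped the same way; roughly (again simplified from asm-generic/barrier.h, with kcsan_rmb() marking read barriers):

#ifndef smp_mb
#define smp_mb()	do { kcsan_mb(); __smp_mb(); } while (0)
#endif

#ifndef dma_rmb
#define dma_rmb()	do { kcsan_rmb(); __dma_rmb(); } while (0)
#endif

This is why the patch only needs to rename the powerpc definitions rather than touch every barrier call site.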