Commit 2a55550f authored by Kees Cook

m68k: cmpxchg: Dereference matching size

Similar to the recent arm64 fix[1], avoid overly wide casts in the m68k
cmpxchg implementation. Avoids this warning under -Warray-bounds with
GCC 11:

net/sched/cls_tcindex.c: In function 'tcindex_set_parms':
./arch/m68k/include/asm/cmpxchg.h:64:17: warning: array subscript 'volatile struct __xchg_dummy[0]' is partly outside array bounds of 'struct tcf_result[1]' [-Warray-bounds]
   64 |                 __asm__ __volatile__
      |                 ^~~~~~~
net/sched/cls_tcindex.c:338:27: note: while referencing 'cr'
  338 |         struct tcf_result cr = {};
      |                           ^~

No binary output differences are seen from this change.

[1] commit 3364c6ce ("arm64: atomics: lse: Dereference matching size")

Cc: "Peter Zijlstra (Intel)" <peterz@infradead.org>
Cc: Mark Rutland <mark.rutland@arm.com>
Cc: Greg Ungerer <gerg@linux-m68k.org>
Cc: linux-m68k@lists.linux-m68k.org
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org>
Link: https://lore.kernel.org/lkml/CAMuHMdVRrD+2zKoHxAaQdDuiK5JFDanbv0SJ91OdWfx+eyekPQ@mail.gmail.com
Signed-off-by: Kees Cook <keescook@chromium.org>
parent 9ed0a59c
...@@ -4,8 +4,7 @@ ...@@ -4,8 +4,7 @@
#include <linux/irqflags.h> #include <linux/irqflags.h>
struct __xchg_dummy { unsigned long a[100]; }; #define __xg(type, x) ((volatile type *)(x))
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int); extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);
...@@ -50,7 +49,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz ...@@ -50,7 +49,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
"1:\n\t" "1:\n\t"
"casb %0,%1,%2\n\t" "casb %0,%1,%2\n\t"
"jne 1b" "jne 1b"
: "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory"); : "=&d" (x) : "d" (x), "m" (*__xg(u8, ptr)) : "memory");
break; break;
case 2: case 2:
__asm__ __volatile__ __asm__ __volatile__
...@@ -58,7 +57,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz ...@@ -58,7 +57,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
"1:\n\t" "1:\n\t"
"casw %0,%1,%2\n\t" "casw %0,%1,%2\n\t"
"jne 1b" "jne 1b"
: "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory"); : "=&d" (x) : "d" (x), "m" (*__xg(u16, ptr)) : "memory");
break; break;
case 4: case 4:
__asm__ __volatile__ __asm__ __volatile__
...@@ -66,7 +65,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz ...@@ -66,7 +65,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
"1:\n\t" "1:\n\t"
"casl %0,%1,%2\n\t" "casl %0,%1,%2\n\t"
"jne 1b" "jne 1b"
: "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory"); : "=&d" (x) : "d" (x), "m" (*__xg(u32, ptr)) : "memory");
break; break;
default: default:
x = __invalid_xchg_size(x, ptr, size); x = __invalid_xchg_size(x, ptr, size);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment