Commit ce968f65 authored by Heiko Carstens

s390/cmpxchg: make variables local to each case label

Make variables local to each case label. This limits the scope of
variables and allows to use proper types everywhere.

Link: https://lore.kernel.org/r/Y2J7+HqgAZwnfxsh@osiris
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
parent 13f62e84
...@@ -88,11 +88,10 @@ static __always_inline unsigned long __cmpxchg(unsigned long address, ...@@ -88,11 +88,10 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
unsigned long old, unsigned long old,
unsigned long new, int size) unsigned long new, int size)
{ {
unsigned long prev, tmp;
int shift;
switch (size) { switch (size) {
case 1: case 1: {
unsigned int prev, tmp, shift;
shift = (3 ^ (address & 3)) << 3; shift = (3 ^ (address & 3)) << 3;
address ^= address & 3; address ^= address & 3;
asm volatile( asm volatile(
...@@ -115,7 +114,10 @@ static __always_inline unsigned long __cmpxchg(unsigned long address, ...@@ -115,7 +114,10 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
[mask] "d" (~(0xff << shift)) [mask] "d" (~(0xff << shift))
: "memory", "cc"); : "memory", "cc");
return prev >> shift; return prev >> shift;
case 2: }
case 2: {
unsigned int prev, tmp, shift;
shift = (2 ^ (address & 2)) << 3; shift = (2 ^ (address & 2)) << 3;
address ^= address & 2; address ^= address & 2;
asm volatile( asm volatile(
...@@ -138,16 +140,22 @@ static __always_inline unsigned long __cmpxchg(unsigned long address, ...@@ -138,16 +140,22 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
[mask] "d" (~(0xffff << shift)) [mask] "d" (~(0xffff << shift))
: "memory", "cc"); : "memory", "cc");
return prev >> shift; return prev >> shift;
case 4: }
case 4: {
unsigned int prev;
asm volatile( asm volatile(
" cs %[prev],%[new],%[address]\n" " cs %[prev],%[new],%[address]\n"
: [prev] "=&d" (prev), : [prev] "=&d" (prev),
[address] "+Q" (*(int *)address) [address] "+Q" (*(int *)address)
: "0" (old), : "0" ((unsigned int)old),
[new] "d" (new) [new] "d" (new)
: "memory", "cc"); : "memory", "cc");
return prev; return prev;
case 8: }
case 8: {
unsigned long prev;
asm volatile( asm volatile(
" csg %[prev],%[new],%[address]\n" " csg %[prev],%[new],%[address]\n"
: [prev] "=&d" (prev), : [prev] "=&d" (prev),
...@@ -157,6 +165,7 @@ static __always_inline unsigned long __cmpxchg(unsigned long address, ...@@ -157,6 +165,7 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
: "memory", "cc"); : "memory", "cc");
return prev; return prev;
} }
}
__cmpxchg_called_with_bad_pointer(); __cmpxchg_called_with_bad_pointer();
return old; return old;
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment