author | Heiko Carstens <hca@linux.ibm.com> | 2022-11-02 15:17:28 +0100
---|---|---
committer | Heiko Carstens <hca@linux.ibm.com> | 2022-11-21 13:36:15 +0100
commit | ce968f654570dbd9cac7de694681640061559d3b (patch) |
tree | bd55b4133ee8c7eb56eff57ce34fd812722bcdf5 |
parent | 13f62e84385fa0241fc6a2178da50af02189121b (diff) |
download | linux-next-ce968f654570dbd9cac7de694681640061559d3b.tar.gz |
s390/cmpxchg: make variables local to each case label
Make variables local to each case label. This limits the scope of the
variables and allows proper types to be used everywhere.
Link: https://lore.kernel.org/r/Y2J7+HqgAZwnfxsh@osiris
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
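
For readers unfamiliar with the pattern, the sketch below is illustrative only (it is not part of the patch; the function name and values are made up): wrapping each case body in braces opens a new block scope, so each case can declare temporaries of exactly the type it operates on.

/*
 * Illustrative only -- not from the patch. Braces after each case label
 * open a block scope, so every case can declare variables of the type
 * that matches its operand size instead of sharing one set of
 * function-scope variables.
 */
static unsigned long load_widened(const void *ptr, int size)
{
	switch (size) {
	case 4: {
		unsigned int val = *(const unsigned int *)ptr;	/* 32-bit temporary, visible only here */

		return val;
	}
	case 8: {
		unsigned long val = *(const unsigned long *)ptr;	/* 64-bit temporary, visible only here */

		return val;
	}
	}
	return 0;
}

Compared with one shared set of function-scope variables, this keeps each temporary's width matched to the operand size its case handles, which is what lets case 4 in the patch below use an unsigned int instead of unsigned long.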
-rw-r--r-- | arch/s390/include/asm/cmpxchg.h | 25 |
1 file changed, 17 insertions, 8 deletions
diff --git a/arch/s390/include/asm/cmpxchg.h b/arch/s390/include/asm/cmpxchg.h
index 56fb8aa08945..02165acdaa93 100644
--- a/arch/s390/include/asm/cmpxchg.h
+++ b/arch/s390/include/asm/cmpxchg.h
@@ -88,11 +88,10 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
 						unsigned long old,
 						unsigned long new, int size)
 {
-	unsigned long prev, tmp;
-	int shift;
-
 	switch (size) {
-	case 1:
+	case 1: {
+		unsigned int prev, tmp, shift;
+
 		shift = (3 ^ (address & 3)) << 3;
 		address ^= address & 3;
 		asm volatile(
@@ -115,7 +114,10 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
 			  [mask] "d" (~(0xff << shift))
 			: "memory", "cc");
 		return prev >> shift;
-	case 2:
+	}
+	case 2: {
+		unsigned int prev, tmp, shift;
+
 		shift = (2 ^ (address & 2)) << 3;
 		address ^= address & 2;
 		asm volatile(
@@ -138,16 +140,22 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
 			  [mask] "d" (~(0xffff << shift))
 			: "memory", "cc");
 		return prev >> shift;
-	case 4:
+	}
+	case 4: {
+		unsigned int prev;
+
 		asm volatile(
 			"	cs	%[prev],%[new],%[address]\n"
 			: [prev] "=&d" (prev),
 			  [address] "+Q" (*(int *)address)
-			: "0" (old),
+			: "0" ((unsigned int)old),
 			  [new] "d" (new)
 			: "memory", "cc");
 		return prev;
-	case 8:
+	}
+	case 8: {
+		unsigned long prev;
+
 		asm volatile(
 			"	csg	%[prev],%[new],%[address]\n"
 			: [prev] "=&d" (prev),
@@ -157,6 +165,7 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
 			: "memory", "cc");
 		return prev;
 	}
+	}
 	__cmpxchg_called_with_bad_pointer();
 	return old;
 }