Commit 326ed6a9 authored by Scott Wood, committed by Benjamin Herrenschmidt

powerpc: mtspr/mtmsr should take an unsigned long

Add a cast in case the caller passes in a different type, so the value
is converted as it would be if mtspr/mtmsr were functions.

Previously, if a 64-bit type was passed in on 32-bit, GCC would bind the
constraint to a pair of registers and substitute the first register of
the pair into the asm code.  That register holds the upper half of the
64-bit value, which is generally not the desired behavior.
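
For illustration, a minimal stand-alone sketch of the failure mode (hypothetical
macro names, not part of this patch): compiled for 32-bit powerpc, the old form
binds the "r" constraint of a 64-bit value to a register pair, and %0 expands to
the first (high-word) register of that pair.

/* Hypothetical reduction of the problem, for illustration only. */
#define MTSPR_OLD(rn, v) asm volatile("mtspr " #rn ",%0" : : "r" (v))
#define MTSPR_NEW(rn, v) asm volatile("mtspr " #rn ",%0" : \
                                      : "r" ((unsigned long)(v)) \
                                      : "memory")

void demo(unsigned long long v)
{
        /*
         * Old form: on 32-bit, "r" (v) allocates a register pair and %0
         * expands to its first register, which holds the upper 32 bits,
         * so the wrong word ends up in the SPR.
         */
        MTSPR_OLD(272, v);      /* 272 is SPRG0 */

        /*
         * New form: the cast truncates to unsigned long first, so a single
         * register holding the low 32 bits is used.
         */
        MTSPR_NEW(272, v);
}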
Signed-off-by: Scott Wood <scottwood@freescale.com>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
parent 53d1e658
@@ -1024,13 +1024,16 @@
 #define mtmsrd(v) __mtmsrd((v), 0)
 #define mtmsr(v)  mtmsrd(v)
 #else
-#define mtmsr(v)  asm volatile("mtmsr %0" : : "r" (v) : "memory")
+#define mtmsr(v)  asm volatile("mtmsr %0" : \
+                               : "r" ((unsigned long)(v)) \
+                               : "memory")
 #endif
 #define mfspr(rn) ({unsigned long rval; \
                   asm volatile("mfspr %0," __stringify(rn) \
                                : "=r" (rval)); rval;})
-#define mtspr(rn, v) asm volatile("mtspr " __stringify(rn) ",%0" : : "r" (v)\
-                                  : "memory")
+#define mtspr(rn, v) asm volatile("mtspr " __stringify(rn) ",%0" : \
+                                  : "r" ((unsigned long)(v)) \
+                                  : "memory")
 #ifdef __powerpc64__
...
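
As a usage note, a hypothetical caller (not part of this patch) showing what the
cast buys on a 32-bit kernel:

#include <linux/types.h>
#include <asm/reg.h>

/* Hypothetical caller, for illustration only. */
static void stash_cookie(u64 cookie)
{
        /*
         * With the cast added above, mtspr() moves the low word of 'cookie'
         * into SPRG3 on 32-bit; previously the high word of the register
         * pair would have been substituted into the asm.
         */
        mtspr(SPRN_SPRG3, cookie);
}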