powerpc: Add compiler memory barrier to mtmsr macro
On 32-bit non-Book E, local_irq_restore() turns into just mtmsr(), which doesn't currently have a compiler memory barrier. This means that accesses to memory inside a local_irq_save/restore section, or a spin_lock_irqsave/spin_unlock_irqrestore section on UP, can be reordered by the compiler to occur outside that section.

To fix this, this patch adds a compiler memory barrier to mtmsr for both 32-bit and 64-bit. Having a compiler memory barrier in mtmsr makes sense because it will almost always be changing something about the context in which memory accesses are done, so in general we don't want memory accesses getting moved from one side of an mtmsr to the other.

With the barrier in mtmsr(), some of the explicit barriers in hw_irq.h are now redundant, so this removes them.

Signed-off-by: Paul Mackerras <paulus@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
This commit is contained in:
commit 4c75f84f2c (parent e8d1673b97)
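To make the reordering hazard concrete, here is a minimal, PowerPC-only sketch (not part of the patch; MSR_EE's value is the architectural bit, but helper names such as my_mtmsr and count_event are illustrative only) of what the "memory" clobber changes:

/* Minimal sketch, PowerPC-only; names are illustrative, not from the patch. */
#define MSR_EE	0x8000UL		/* MSR[EE]: external interrupt enable */

static inline unsigned long my_mfmsr(void)
{
	unsigned long msr;
	asm volatile("mfmsr %0" : "=r" (msr));
	return msr;
}

/* Old behaviour: no clobber, so GCC may move loads/stores past the mtmsr. */
static inline void my_mtmsr_unsafe(unsigned long msr)
{
	asm volatile("mtmsr %0" : : "r" (msr));
}

/* Patched behaviour: the "memory" clobber keeps accesses on their own side. */
static inline void my_mtmsr(unsigned long msr)
{
	asm volatile("mtmsr %0" : : "r" (msr) : "memory");
}

static unsigned long event_count;

static void count_event(void)
{
	unsigned long flags = my_mfmsr();

	my_mtmsr(flags & ~MSR_EE);	/* "local_irq_save": clear MSR[EE] */
	event_count++;			/* must not leak outside this window */
	my_mtmsr(flags);		/* "local_irq_restore" */
}

With my_mtmsr_unsafe() in place of my_mtmsr(), the compiler would be free to hoist or sink the event_count access past either MSR write, which is exactly the bug the commit message describes for UP spin_lock_irqsave sections.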
--- a/arch/powerpc/include/asm/hw_irq.h
+++ b/arch/powerpc/include/asm/hw_irq.h
@@ -80,7 +80,7 @@ static inline void local_irq_disable(void)
 	__asm__ __volatile__("wrteei 0": : :"memory");
 #else
 	unsigned long msr;
-	__asm__ __volatile__("": : :"memory");
+
 	msr = mfmsr();
 	SET_MSR_EE(msr & ~MSR_EE);
 #endif
@@ -92,7 +92,7 @@ static inline void local_irq_enable(void)
 	__asm__ __volatile__("wrteei 1": : :"memory");
 #else
 	unsigned long msr;
-	__asm__ __volatile__("": : :"memory");
+
 	msr = mfmsr();
 	SET_MSR_EE(msr | MSR_EE);
 #endif
@@ -108,7 +108,6 @@ static inline void local_irq_save_ptr(unsigned long *flags)
 #else
 	SET_MSR_EE(msr & ~MSR_EE);
 #endif
-	__asm__ __volatile__("": : :"memory");
 }
 
 #define local_save_flags(flags)	((flags) = mfmsr())
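The empty asm statements deleted above were the only compiler barriers in these helpers. A small sketch of why they become redundant, assuming SET_MSR_EE(x) expands to mtmsr(x) as it does in this header (my_irq_restore is an illustrative name, not kernel code):

/* With the clobber now inside mtmsr(), writing the MSR is itself a
 * compiler barrier, so no separate empty asm barrier is needed. */
#define mtmsr(v)	asm volatile("mtmsr %0" : : "r" (v) : "memory")
#define SET_MSR_EE(x)	mtmsr(x)

static inline void my_irq_restore(unsigned long flags)
{
	SET_MSR_EE(flags);	/* barrier comes from mtmsr() itself */
}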
--- a/arch/powerpc/include/asm/reg.h
+++ b/arch/powerpc/include/asm/reg.h
@@ -745,11 +745,11 @@
 				asm volatile("mfmsr %0" : "=r" (rval)); rval;})
 #ifdef CONFIG_PPC64
 #define __mtmsrd(v, l)	asm volatile("mtmsrd %0," __stringify(l) \
-				     : : "r" (v))
+				     : : "r" (v) : "memory")
 #define mtmsrd(v)	__mtmsrd((v), 0)
 #define mtmsr(v)	mtmsrd(v)
 #else
-#define mtmsr(v)	asm volatile("mtmsr %0" : : "r" (v))
+#define mtmsr(v)	asm volatile("mtmsr %0" : : "r" (v) : "memory")
 #endif
 
 #define mfspr(rn)	({unsigned long rval; \
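For reference, a small sketch of how the 64-bit __mtmsrd(v, l) macro builds its instruction string; the __stringify() helpers below mirror include/linux/stringify.h, and the comment on the L operand reflects the Power ISA definition rather than anything in this patch:

/* Stringification trick used by __mtmsrd(). */
#define __stringify_1(x...)	#x
#define __stringify(x...)	__stringify_1(x)

#define __mtmsrd(v, l)	asm volatile("mtmsrd %0," __stringify(l) \
				     : : "r" (v) : "memory")

/* __mtmsrd(val, 0) emits "mtmsrd %0,0", a full MSR update (what mtmsr()/mtmsrd()
 * above use); __mtmsrd(val, 1) emits "mtmsrd %0,1", which the ISA defines as
 * modifying only MSR[EE] and MSR[RI]. In both forms the "memory" clobber now
 * stops the compiler from reordering accesses across the MSR write. */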