arm64: irqflags: Use ICC_PMR_EL1 for interrupt masking
Instead of disabling interrupts by setting the PSR.I bit, use a priority higher than the one used for interrupts to mask them via PMR.

When using PMR to disable interrupts, the value of PMR will be used instead of PSR.[DAIF] for the irqflags.

Signed-off-by: Julien Thierry <julien.thierry@arm.com>
Suggested-by: Daniel Thompson <daniel.thompson@linaro.org>
Acked-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Cc: Will Deacon <will.deacon@arm.com>
Cc: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Cc: Oleg Nesterov <oleg@redhat.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
parent 13b210ddf4
commit 4a503217ce
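For orientation, here is a minimal C sketch of what the reworked irqflags helpers do once the CPU is patched for ARM64_HAS_IRQ_PRIO_MASKING, based on the "logically equivalent to" comment carried in the patch below. The sketch_* names are hypothetical; the real helpers are built from ALTERNATIVE()-patched inline assembly (with "dsb sy" barriers after the PMR writes, omitted here), and the constants and helpers used (GIC_PRIO_IRQON, GIC_PRIO_IRQOFF, PSR_I_BIT, SYS_ICC_PMR_EL1, system_uses_irq_prio_masking()) are the ones the patch itself refers to.

/* Sketch only: plain-C equivalent of the ALTERNATIVE-patched asm in the patch. */
static inline unsigned long sketch_local_save_flags(void)
{
        unsigned long daif_bits = read_sysreg(daif);

        if (!system_uses_irq_prio_masking())
                return daif_bits;       /* legacy: flags are the DAIF bits */

        /* PMR-based masking: flags hold a priority value, not DAIF bits */
        return (daif_bits & PSR_I_BIT) ? GIC_PRIO_IRQOFF
                                       : read_sysreg_s(SYS_ICC_PMR_EL1);
}

static inline void sketch_local_irq_disable(void)
{
        if (system_uses_irq_prio_masking())
                write_sysreg_s(GIC_PRIO_IRQOFF, SYS_ICC_PMR_EL1);
        else
                asm volatile("msr daifset, #2" ::: "memory");
}

static inline unsigned long sketch_local_irq_save(void)
{
        unsigned long flags = sketch_local_save_flags();

        sketch_local_irq_disable();
        return flags;
}

static inline int sketch_irqs_disabled_flags(unsigned long flags)
{
        if (system_uses_irq_prio_masking())
                return flags <= GIC_PRIO_IRQOFF;        /* PMR masks IRQs */
        return flags & PSR_I_BIT;
}

Note that with priority masking enabled, the saved irqflags value is a PMR priority rather than a DAIF image, which is why arch_irqs_disabled_flags() below compares against GIC_PRIO_IRQOFF instead of testing PSR_I_BIT.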
--- a/arch/arm64/include/asm/efi.h
+++ b/arch/arm64/include/asm/efi.h
@@ -44,6 +44,17 @@ efi_status_t __efi_rt_asm_wrapper(void *, const char *, ...);
 
 #define ARCH_EFI_IRQ_FLAGS_MASK (PSR_D_BIT | PSR_A_BIT | PSR_I_BIT | PSR_F_BIT)
 
+/*
+ * Even when Linux uses IRQ priorities for IRQ disabling, EFI does not.
+ * And EFI shouldn't really play around with priority masking as it is not aware
+ * which priorities the OS has assigned to its interrupts.
+ */
+#define arch_efi_save_flags(state_flags)	\
+	((void)((state_flags) = read_sysreg(daif)))
+
+#define arch_efi_restore_flags(state_flags)	write_sysreg(state_flags, daif)
+
+
 /* arch specific definitions used by the stub code */
 
 /*
--- a/arch/arm64/include/asm/irqflags.h
+++ b/arch/arm64/include/asm/irqflags.h
@@ -18,7 +18,9 @@
 
 #ifdef __KERNEL__
 
+#include <asm/alternative.h>
 #include <asm/ptrace.h>
+#include <asm/sysreg.h>
 
 /*
  * Aarch64 has flags for masking: Debug, Asynchronous (serror), Interrupts and
@@ -36,33 +38,27 @@
 /*
  * CPU interrupt mask handling.
  */
-static inline unsigned long arch_local_irq_save(void)
-{
-	unsigned long flags;
-	asm volatile(
-		"mrs	%0, daif		// arch_local_irq_save\n"
-		"msr	daifset, #2"
-		: "=r" (flags)
-		:
-		: "memory");
-	return flags;
-}
-
 static inline void arch_local_irq_enable(void)
 {
-	asm volatile(
-		"msr	daifclr, #2		// arch_local_irq_enable"
-		:
+	asm volatile(ALTERNATIVE(
+		"msr	daifclr, #2		// arch_local_irq_enable\n"
+		"nop",
+		"msr_s " __stringify(SYS_ICC_PMR_EL1) ",%0\n"
+		"dsb	sy",
+		ARM64_HAS_IRQ_PRIO_MASKING)
 		:
+		: "r" (GIC_PRIO_IRQON)
 		: "memory");
 }
 
 static inline void arch_local_irq_disable(void)
 {
-	asm volatile(
-		"msr	daifset, #2		// arch_local_irq_disable"
-		:
+	asm volatile(ALTERNATIVE(
+		"msr	daifset, #2		// arch_local_irq_disable",
+		"msr_s " __stringify(SYS_ICC_PMR_EL1) ", %0",
+		ARM64_HAS_IRQ_PRIO_MASKING)
 		:
+		: "r" (GIC_PRIO_IRQOFF)
 		: "memory");
 }
 
@@ -71,12 +67,44 @@ static inline void arch_local_irq_disable(void)
  */
 static inline unsigned long arch_local_save_flags(void)
 {
+	unsigned long daif_bits;
 	unsigned long flags;
-	asm volatile(
-		"mrs	%0, daif		// arch_local_save_flags"
-		: "=r" (flags)
-		:
+
+	daif_bits = read_sysreg(daif);
+
+	/*
+	 * The asm is logically equivalent to:
+	 *
+	 * if (system_uses_irq_prio_masking())
+	 *	flags = (daif_bits & PSR_I_BIT) ?
+	 *		GIC_PRIO_IRQOFF :
+	 *		read_sysreg_s(SYS_ICC_PMR_EL1);
+	 * else
+	 *	flags = daif_bits;
+	 */
+	asm volatile(ALTERNATIVE(
+			"mov	%0, %1\n"
+			"nop\n"
+			"nop",
+			"mrs_s	%0, " __stringify(SYS_ICC_PMR_EL1) "\n"
+			"ands	%1, %1, " __stringify(PSR_I_BIT) "\n"
+			"csel	%0, %0, %2, eq",
+			ARM64_HAS_IRQ_PRIO_MASKING)
+		: "=&r" (flags), "+r" (daif_bits)
+		: "r" (GIC_PRIO_IRQOFF)
 		: "memory");
+
 	return flags;
 }
 
+static inline unsigned long arch_local_irq_save(void)
+{
+	unsigned long flags;
+
+	flags = arch_local_save_flags();
+
+	arch_local_irq_disable();
+
+	return flags;
+}
+
@@ -85,16 +113,32 @@ static inline unsigned long arch_local_save_flags(void)
  */
 static inline void arch_local_irq_restore(unsigned long flags)
 {
-	asm volatile(
-		"msr	daif, %0		// arch_local_irq_restore"
-	:
-	: "r" (flags)
-	: "memory");
+	asm volatile(ALTERNATIVE(
+			"msr	daif, %0\n"
+			"nop",
+			"msr_s	" __stringify(SYS_ICC_PMR_EL1) ", %0\n"
+			"dsb	sy",
+			ARM64_HAS_IRQ_PRIO_MASKING)
+		: "+r" (flags)
+		:
+		: "memory");
 }
 
 static inline int arch_irqs_disabled_flags(unsigned long flags)
 {
-	return flags & PSR_I_BIT;
+	int res;
+
+	asm volatile(ALTERNATIVE(
+			"and	%w0, %w1, #" __stringify(PSR_I_BIT) "\n"
+			"nop",
+			"cmp	%w1, #" __stringify(GIC_PRIO_IRQOFF) "\n"
+			"cset	%w0, ls",
+			ARM64_HAS_IRQ_PRIO_MASKING)
+		: "=&r" (res)
+		: "r" ((int) flags)
+		: "memory");
+
+	return res;
 }
 
 #endif
 #endif