mirror of
https://mirrors.bfsu.edu.cn/git/linux.git
synced 2025-01-24 23:04:17 +08:00
addfc38672
When building for LSE atomics (CONFIG_ARM64_LSE_ATOMICS), if the hardware or toolchain doesn't support it, the existing code falls back to LL/SC atomics. It achieves this by branching from inline assembly to a function that is built with special compile flags. Furthermore, this results in the clobbering of registers even when the fallback isn't used, increasing register pressure.

Improve this by providing inline implementations of both LSE and LL/SC atomics, and use a static key to select between them, which allows the compiler to generate better atomics code. Put the LL/SC fallback atomics in their own subsection to improve icache performance.

Signed-off-by: Andrew Murray <andrew.murray@arm.com>
Signed-off-by: Will Deacon <will@kernel.org>
47 lines
937 B
C
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_LSE_H
#define __ASM_LSE_H

/*
 * Helpers for selecting between ARMv8.1 LSE atomic instructions and the
 * LL/SC (load-/store-exclusive) fallbacks.
 *
 * When the assembler supports LSE (CONFIG_AS_LSE) and the kernel is
 * configured to use it (CONFIG_ARM64_LSE_ATOMICS), both forms are emitted
 * and the LSE form is patched in at runtime by the alternatives framework
 * once the ARM64_HAS_LSE_ATOMICS capability is detected.  Otherwise only
 * the LL/SC form is emitted.
 */

#if defined(CONFIG_AS_LSE) && defined(CONFIG_ARM64_LSE_ATOMICS)

#include <linux/compiler_types.h>
#include <linux/export.h>
#include <linux/stringify.h>
#include <asm/alternative.h>
#include <asm/cpucaps.h>

#ifdef __ASSEMBLER__

/* Allow LSE mnemonics to be assembled regardless of the base -march. */
.arch_extension	lse

/*
 * alt_lse llsc, lse
 *
 * Emit \llsc by default; runtime-patched to \lse when the
 * ARM64_HAS_LSE_ATOMICS capability is present.
 */
.macro alt_lse, llsc, lse
	alternative_insn	"\llsc", "\lse", ARM64_HAS_LSE_ATOMICS
.endm

#else	/* __ASSEMBLER__ */

/* As above: let inline asm in C files use LSE mnemonics. */
__asm__(".arch_extension	lse");

/* In-line patching at runtime: llsc by default, lse once the cap is set. */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse)				\
	ALTERNATIVE(llsc, lse, ARM64_HAS_LSE_ATOMICS)

#endif	/* __ASSEMBLER__ */

#else	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */

/* LSE unavailable: unconditionally emit the LL/SC sequence only. */

#ifdef __ASSEMBLER__

.macro alt_lse, llsc, lse
	\llsc
.endm

#else	/* __ASSEMBLER__ */

#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc

#endif	/* __ASSEMBLER__ */

#endif	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */
#endif	/* __ASM_LSE_H */