x86/cpu: Rename srso_(.*)_alias to srso_alias_\1

For a more consistent namespace.

  [ bp: Fixup names in the doc too. ]

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
Link: https://lore.kernel.org/r/20230814121148.976236447@infradead.org
Author: Peter Zijlstra, 2023-08-14 13:44:33 +02:00
Committed by: Borislav Petkov (AMD)
parent d025b7bac0
commit 42be649dd1
4 changed files with 22 additions and 22 deletions

Documentation/admin-guide/hw-vuln/srso.rst

@@ -141,8 +141,8 @@ sequence.
 To ensure the safety of this mitigation, the kernel must ensure that the
 safe return sequence is itself free from attacker interference. In Zen3
 and Zen4, this is accomplished by creating a BTB alias between the
-untraining function srso_untrain_ret_alias() and the safe return
-function srso_safe_ret_alias() which results in evicting a potentially
+untraining function srso_alias_untrain_ret() and the safe return
+function srso_alias_safe_ret() which results in evicting a potentially
 poisoned BTB entry and using that safe one for all function returns.
 In older Zen1 and Zen2, this is accomplished using a reinterpretation
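
The aliasing requirement this paragraph describes can be written down as a
small check: the two functions must sit in the same 2M page while their
addresses differ in exactly bits 2, 8, 14 and 20 (the bit pattern used in the
vmlinux.lds.S and retpoline.S hunks below). A minimal user-space sketch, with
hypothetical addresses rather than real kernel symbols:

    #include <assert.h>
    #include <stdint.h>

    #define SRSO_ALIAS_MASK ((1ULL << 2) | (1ULL << 8) | (1ULL << 14) | (1ULL << 20))
    #define SZ_2M           (1ULL << 21)

    /* Two addresses alias in the Zen3/Zen4 BTB for this scheme when they
     * share a 2M page and differ in exactly bits 2, 8, 14 and 20. */
    static int btb_aliases(uint64_t untrain, uint64_t safe)
    {
        return (untrain / SZ_2M == safe / SZ_2M) &&
               ((untrain ^ safe) == SRSO_ALIAS_MASK);
    }

    int main(void)
    {
        /* hypothetical addresses: untrain thunk 2M aligned, safe return
         * with bits 2, 8, 14 and 20 set on top of it */
        uint64_t untrain = 0xffffffff82000000ULL;
        uint64_t safe    = untrain | SRSO_ALIAS_MASK;

        assert(btb_aliases(untrain, safe));
        return 0;
    }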

arch/x86/include/asm/nospec-branch.h

@@ -300,7 +300,7 @@
 #ifdef CONFIG_CPU_SRSO
 	ALTERNATIVE_2 "", "call srso_untrain_ret", X86_FEATURE_SRSO, \
-		      "call srso_untrain_ret_alias", X86_FEATURE_SRSO_ALIAS
+		      "call srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS
 #endif
 .endm
@@ -316,7 +316,7 @@
 #ifdef CONFIG_CPU_SRSO
 	ALTERNATIVE_2 "", "call srso_untrain_ret", X86_FEATURE_SRSO, \
-		      "call srso_untrain_ret_alias", X86_FEATURE_SRSO_ALIAS
+		      "call srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS
 #endif
 .endm
@@ -353,7 +353,7 @@ extern void srso_alias_return_thunk(void);
 extern void retbleed_untrain_ret(void);
 extern void srso_untrain_ret(void);
-extern void srso_untrain_ret_alias(void);
+extern void srso_alias_untrain_ret(void);
 extern void entry_ibpb(void);
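
The ALTERNATIVE_2 above patches one of the listed calls into the return path
once at boot, keyed on the CPU feature flags: srso_untrain_ret() on
X86_FEATURE_SRSO parts, srso_alias_untrain_ret() on X86_FEATURE_SRSO_ALIAS
parts, and nothing otherwise. A rough C analog of that decision, for
illustration only (the stubs and flag variables are hypothetical, and the real
mechanism rewrites the call site rather than branching at runtime):

    #include <stdbool.h>
    #include <stdio.h>

    /* stubs standing in for the real untraining thunks */
    static void srso_untrain_ret(void)       { puts("srso_untrain_ret"); }
    static void srso_alias_untrain_ret(void) { puts("srso_alias_untrain_ret"); }

    /* hypothetical flags standing in for X86_FEATURE_SRSO{,_ALIAS} */
    static bool cpu_has_srso, cpu_has_srso_alias = true;

    /* runtime-dispatch analog of the boot-time ALTERNATIVE_2 patching */
    static void untrain_ret(void)
    {
        if (cpu_has_srso)
            srso_untrain_ret();
        else if (cpu_has_srso_alias)
            srso_alias_untrain_ret();
        /* neither feature: the ALTERNATIVE_2 default is empty (no call) */
    }

    int main(void)
    {
        untrain_ret();
        return 0;
    }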

arch/x86/kernel/vmlinux.lds.S

@@ -147,10 +147,10 @@ SECTIONS
 #ifdef CONFIG_CPU_SRSO
 		/*
-		 * See the comment above srso_untrain_ret_alias()'s
+		 * See the comment above srso_alias_untrain_ret()'s
 		 * definition.
 		 */
-		. = srso_untrain_ret_alias | (1 << 2) | (1 << 8) | (1 << 14) | (1 << 20);
+		. = srso_alias_untrain_ret | (1 << 2) | (1 << 8) | (1 << 14) | (1 << 20);
 		*(.text..__x86.rethunk_safe)
 #endif
 		ALIGN_ENTRY_TEXT_END
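
Because srso_alias_untrain_ret is 2M aligned, bits 0-20 of its address are
zero, so the OR in the location-counter assignment above behaves like an
addition and places .text..__x86.rethunk_safe at an address differing from the
untrain thunk in exactly bits 2, 8, 14 and 20. A small sketch of that
arithmetic (the address is hypothetical):

    #include <assert.h>
    #include <stdint.h>

    #define SRSO_ALIAS_MASK ((1ULL << 2) | (1ULL << 8) | (1ULL << 14) | (1ULL << 20))

    int main(void)
    {
        /* hypothetical 2M-aligned address for srso_alias_untrain_ret */
        uint64_t untrain = 0xffffffff82000000ULL;

        /* what ". = srso_alias_untrain_ret | mask" yields: with the low
         * 21 bits of a 2M-aligned address all zero, OR acts like ADD */
        uint64_t safe = untrain | SRSO_ALIAS_MASK;

        assert(safe == untrain + SRSO_ALIAS_MASK);
        assert((safe ^ untrain) == SRSO_ALIAS_MASK);
        return 0;
    }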
@@ -536,8 +536,8 @@ INIT_PER_CPU(irq_stack_backing_store);
  * Instead do: (A | B) - (A & B) in order to compute the XOR
  * of the two function addresses:
  */
-. = ASSERT(((ABSOLUTE(srso_untrain_ret_alias) | srso_safe_ret_alias) -
-	    (ABSOLUTE(srso_untrain_ret_alias) & srso_safe_ret_alias)) == ((1 << 2) | (1 << 8) | (1 << 14) | (1 << 20)),
+. = ASSERT(((ABSOLUTE(srso_alias_untrain_ret) | srso_alias_safe_ret) -
+	    (ABSOLUTE(srso_alias_untrain_ret) & srso_alias_safe_ret)) == ((1 << 2) | (1 << 8) | (1 << 14) | (1 << 20)),
 	    "SRSO function pair won't alias");
 #endif
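
The roundabout (A | B) - (A & B) form exists because the XOR operator is
unavailable in the linker-script expressions of older toolchains. The identity
holds because A & B is a submask of A | B, so the subtraction never borrows
and leaves exactly the bits set in one operand but not the other. Demonstrated
in C (addresses hypothetical):

    #include <assert.h>
    #include <stdint.h>

    int main(void)
    {
        /* hypothetical function addresses differing in bits 2, 8, 14, 20 */
        uint64_t a = 0xffffffff82000000ULL;
        uint64_t b = a | (1ULL << 2) | (1ULL << 8) | (1ULL << 14) | (1ULL << 20);

        /* (A | B) counts bits set in either operand, (A & B) the bits set
         * in both; their difference is the XOR of the two values */
        assert(((a | b) - (a & b)) == (a ^ b));
        return 0;
    }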

arch/x86/lib/retpoline.S

@@ -133,56 +133,56 @@ SYM_CODE_END(__x86_indirect_jump_thunk_array)
 #ifdef CONFIG_RETHUNK
 /*
- * srso_untrain_ret_alias() and srso_safe_ret_alias() are placed at
+ * srso_alias_untrain_ret() and srso_alias_safe_ret() are placed at
  * special addresses:
  *
- * - srso_untrain_ret_alias() is 2M aligned
- * - srso_safe_ret_alias() is also in the same 2M page but bits 2, 8, 14
+ * - srso_alias_untrain_ret() is 2M aligned
+ * - srso_alias_safe_ret() is also in the same 2M page but bits 2, 8, 14
  * and 20 in its virtual address are set (while those bits in the
- * srso_untrain_ret_alias() function are cleared).
+ * srso_alias_untrain_ret() function are cleared).
  *
  * This guarantees that those two addresses will alias in the branch
  * target buffer of Zen3/4 generations, leading to any potential
  * poisoned entries at that BTB slot to get evicted.
  *
- * As a result, srso_safe_ret_alias() becomes a safe return.
+ * As a result, srso_alias_safe_ret() becomes a safe return.
  */
 #ifdef CONFIG_CPU_SRSO
 	.section .text..__x86.rethunk_untrain
-SYM_START(srso_untrain_ret_alias, SYM_L_GLOBAL, SYM_A_NONE)
+SYM_START(srso_alias_untrain_ret, SYM_L_GLOBAL, SYM_A_NONE)
 	UNWIND_HINT_FUNC
 	ANNOTATE_NOENDBR
 	ASM_NOP2
 	lfence
 	jmp srso_alias_return_thunk
-SYM_FUNC_END(srso_untrain_ret_alias)
-__EXPORT_THUNK(srso_untrain_ret_alias)
+SYM_FUNC_END(srso_alias_untrain_ret)
+__EXPORT_THUNK(srso_alias_untrain_ret)
 	.section .text..__x86.rethunk_safe
 #else
 /* dummy definition for alternatives */
-SYM_START(srso_untrain_ret_alias, SYM_L_GLOBAL, SYM_A_NONE)
+SYM_START(srso_alias_untrain_ret, SYM_L_GLOBAL, SYM_A_NONE)
 	ANNOTATE_UNRET_SAFE
 	ret
 	int3
-SYM_FUNC_END(srso_untrain_ret_alias)
+SYM_FUNC_END(srso_alias_untrain_ret)
 #endif
-SYM_START(srso_safe_ret_alias, SYM_L_GLOBAL, SYM_A_NONE)
+SYM_START(srso_alias_safe_ret, SYM_L_GLOBAL, SYM_A_NONE)
 	lea 8(%_ASM_SP), %_ASM_SP
 	UNWIND_HINT_FUNC
 	ANNOTATE_UNRET_SAFE
 	ret
 	int3
-SYM_FUNC_END(srso_safe_ret_alias)
+SYM_FUNC_END(srso_alias_safe_ret)
 	.section .text..__x86.return_thunk
 SYM_CODE_START(srso_alias_return_thunk)
 	UNWIND_HINT_FUNC
 	ANNOTATE_NOENDBR
-	call srso_safe_ret_alias
+	call srso_alias_safe_ret
 	ud2
 SYM_CODE_END(srso_alias_return_thunk)
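
Two details of this hunk are worth spelling out. First,
srso_alias_return_thunk reaches srso_alias_safe_ret() via a call, which pushes
a return address of its own; the lea 8(%_ASM_SP), %_ASM_SP discards that slot
so the following ret consumes the original, to-be-protected return address.
Second, the alias mask is below 2^21, so OR-ing it into a 2M-aligned address
can never carry past bit 20, which is what keeps srso_alias_safe_ret() in the
same 2M page as srso_alias_untrain_ret(), as claimed in the comment block. A
compile-time check of that claim:

    #include <stdint.h>

    #define SRSO_ALIAS_MASK ((1ULL << 2) | (1ULL << 8) | (1ULL << 14) | (1ULL << 20))

    /* Setting bits 2, 8, 14 and 20 in a 2M-aligned address cannot carry
     * into bit 21 or above, so the safe-ret thunk stays within the same
     * 2M page as the 2M-aligned untrain thunk. */
    _Static_assert(SRSO_ALIAS_MASK < (1ULL << 21),
                   "alias mask must fit within one 2M page");

    int main(void)
    {
        return 0;
    }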