
arm64: kernel: Convert to modern annotations for assembly functions

In an effort to clarify and simplify the annotation of assembly functions
in the kernel, new macros have been introduced. These replace ENTRY and
ENDPROC and also add a new annotation for static functions, which previously
had no ENTRY equivalent. Update the annotations in the core kernel code to
the new macros.

Signed-off-by: Mark Brown <broonie@kernel.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20200501115430.37315-3-broonie@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
Authored by Mark Brown on 2020-05-01 12:54:29 +01:00; committed by Will Deacon
parent 06607c7e93
commit 0343a7e463
10 changed files with 68 additions and 68 deletions
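
The pattern applied throughout is the one below (a minimal sketch with hypothetical symbol names; the SYM_* macros come from include/linux/linkage.h):

/* old style: ENTRY/ENDPROC, with no way to annotate a file-local symbol */
ENTRY(example_func)
	ret
ENDPROC(example_func)

/* new style: SYM_FUNC_START/SYM_FUNC_END for C-callable functions */
SYM_FUNC_START(example_func)
	ret
SYM_FUNC_END(example_func)

/* ...plus _LOCAL variants for static symbols, which ENTRY could not express */
SYM_FUNC_START_LOCAL(example_helper)
	ret
SYM_FUNC_END(example_helper)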

arch/arm64/kernel/cpu-reset.S

@@ -29,7 +29,7 @@
 * branch to what would be the reset vector. It must be executed with the
 * flat identity mapping.
 */
-ENTRY(__cpu_soft_restart)
+SYM_CODE_START(__cpu_soft_restart)
	/* Clear sctlr_el1 flags. */
	mrs	x12, sctlr_el1
	mov_q	x13, SCTLR_ELx_FLAGS
@@ -47,6 +47,6 @@ ENTRY(__cpu_soft_restart)
	mov	x1, x3		// arg1
	mov	x2, x4		// arg2
	br	x8
-ENDPROC(__cpu_soft_restart)
+SYM_CODE_END(__cpu_soft_restart)
.popsection
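
__cpu_soft_restart takes the SYM_CODE_* pair rather than SYM_FUNC_*: it exits via the br x8 above instead of returning, so it does not behave as a normal AAPCS64 function. The distinction, roughly (a sketch with hypothetical names, in the spirit of Documentation/asm-annotations.rst):

/* SYM_FUNC_*: an ordinary function, entered with bl and left with ret */
SYM_FUNC_START(plain_func)
	ret
SYM_FUNC_END(plain_func)

/* SYM_CODE_*: code outside the C ABI, e.g. entered by br or an exception */
SYM_CODE_START(special_entry)
	b	.			// never returns through ret
SYM_CODE_END(special_entry)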

arch/arm64/kernel/efi-rt-wrapper.S

@@ -5,7 +5,7 @@
#include <linux/linkage.h>
-ENTRY(__efi_rt_asm_wrapper)
+SYM_FUNC_START(__efi_rt_asm_wrapper)
	stp	x29, x30, [sp, #-32]!
	mov	x29, sp
@@ -35,4 +35,4 @@ ENTRY(__efi_rt_asm_wrapper)
	b.ne	0f
	ret
0:	b	efi_handle_corrupted_x18	// tail call
-ENDPROC(__efi_rt_asm_wrapper)
+SYM_FUNC_END(__efi_rt_asm_wrapper)

arch/arm64/kernel/entry-fpsimd.S

@@ -16,34 +16,34 @@
 *
 * x0 - pointer to struct fpsimd_state
 */
-ENTRY(fpsimd_save_state)
+SYM_FUNC_START(fpsimd_save_state)
	fpsimd_save	x0, 8
	ret
-ENDPROC(fpsimd_save_state)
+SYM_FUNC_END(fpsimd_save_state)
/*
 * Load the FP registers.
 *
 * x0 - pointer to struct fpsimd_state
 */
-ENTRY(fpsimd_load_state)
+SYM_FUNC_START(fpsimd_load_state)
	fpsimd_restore	x0, 8
	ret
-ENDPROC(fpsimd_load_state)
+SYM_FUNC_END(fpsimd_load_state)
#ifdef CONFIG_ARM64_SVE
-ENTRY(sve_save_state)
+SYM_FUNC_START(sve_save_state)
	sve_save	0, x1, 2
	ret
-ENDPROC(sve_save_state)
+SYM_FUNC_END(sve_save_state)
-ENTRY(sve_load_state)
+SYM_FUNC_START(sve_load_state)
	sve_load	0, x1, x2, 3, x4
	ret
-ENDPROC(sve_load_state)
+SYM_FUNC_END(sve_load_state)
-ENTRY(sve_get_vl)
+SYM_FUNC_START(sve_get_vl)
	_sve_rdvl	0, 1
	ret
-ENDPROC(sve_get_vl)
+SYM_FUNC_END(sve_get_vl)
#endif /* CONFIG_ARM64_SVE */
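
All of these are leaf helpers called directly from C, so they keep the SYM_FUNC_* pair. In rough terms (a simplified sketch, not the exact include/linux/linkage.h definitions), the macros expand to the same directives ENTRY/ENDPROC used to emit, plus an explicit symbol type and size:

	.globl	fpsimd_save_state		// SYM_FUNC_START: global linkage...
	.align	2				// ...and alignment (simplified)
fpsimd_save_state:
	fpsimd_save	x0, 8
	ret
	.type	fpsimd_save_state, %function	// SYM_FUNC_END: symbol type...
	.size	fpsimd_save_state, . - fpsimd_save_state	// ...and size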

arch/arm64/kernel/hibernate-asm.S

@@ -65,7 +65,7 @@
 * x5: physical address of a zero page that remains zero after resume
 */
.pushsection ".hibernate_exit.text", "ax"
-ENTRY(swsusp_arch_suspend_exit)
+SYM_CODE_START(swsusp_arch_suspend_exit)
	/*
	 * We execute from ttbr0, change ttbr1 to our copied linear map tables
	 * with a break-before-make via the zero page
@@ -110,7 +110,7 @@ ENTRY(swsusp_arch_suspend_exit)
	cbz	x24, 3f		/* Do we need to re-initialise EL2? */
	hvc	#0
3:	ret
-ENDPROC(swsusp_arch_suspend_exit)
+SYM_CODE_END(swsusp_arch_suspend_exit)
/*
 * Restore the hyp stub.
@@ -119,15 +119,15 @@ ENDPROC(swsusp_arch_suspend_exit)
 *
 * x24: The physical address of __hyp_stub_vectors
 */
-el1_sync:
+SYM_CODE_START_LOCAL(el1_sync)
	msr	vbar_el2, x24
	eret
-ENDPROC(el1_sync)
+SYM_CODE_END(el1_sync)
.macro invalid_vector label
-\label:
+SYM_CODE_START_LOCAL(\label)
	b	\label
-ENDPROC(\label)
+SYM_CODE_END(\label)
.endm
	invalid_vector	el2_sync_invalid
@@ -141,7 +141,7 @@ ENDPROC(\label)
/* el2 vectors - switch el2 here while we restore the memory image. */
	.align 11
-ENTRY(hibernate_el2_vectors)
+SYM_CODE_START(hibernate_el2_vectors)
	ventry	el2_sync_invalid	// Synchronous EL2t
	ventry	el2_irq_invalid		// IRQ EL2t
	ventry	el2_fiq_invalid		// FIQ EL2t
@@ -161,6 +161,6 @@ ENTRY(hibernate_el2_vectors)
	ventry	el1_irq_invalid		// IRQ 32-bit EL1
	ventry	el1_fiq_invalid		// FIQ 32-bit EL1
	ventry	el1_error_invalid	// Error 32-bit EL1
-END(hibernate_el2_vectors)
+SYM_CODE_END(hibernate_el2_vectors)
.popsection
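
hibernate_el2_vectors is an exception vector table rather than a function, hence SYM_CODE_*. Each ventry emits one branch padded out to the architectural 128-byte vector spacing; the helper macro is along these lines (a sketch of the arch/arm64/include/asm/assembler.h definition):

	.macro	ventry	label
	.align	7		// pad each slot to 2^7 = 128 bytes
	b	\label
	.endm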

arch/arm64/kernel/hyp-stub.S

@@ -21,7 +21,7 @@
	.align 11
-ENTRY(__hyp_stub_vectors)
+SYM_CODE_START(__hyp_stub_vectors)
	ventry	el2_sync_invalid	// Synchronous EL2t
	ventry	el2_irq_invalid		// IRQ EL2t
	ventry	el2_fiq_invalid		// FIQ EL2t
@@ -41,11 +41,11 @@ ENTRY(__hyp_stub_vectors)
	ventry	el1_irq_invalid		// IRQ 32-bit EL1
	ventry	el1_fiq_invalid		// FIQ 32-bit EL1
	ventry	el1_error_invalid	// Error 32-bit EL1
-ENDPROC(__hyp_stub_vectors)
+SYM_CODE_END(__hyp_stub_vectors)
	.align 11
-el1_sync:
+SYM_CODE_START_LOCAL(el1_sync)
	cmp	x0, #HVC_SET_VECTORS
	b.ne	2f
	msr	vbar_el2, x1
@@ -68,12 +68,12 @@ el1_sync:
9:	mov	x0, xzr
	eret
-ENDPROC(el1_sync)
+SYM_CODE_END(el1_sync)
.macro invalid_vector label
-\label:
+SYM_CODE_START_LOCAL(\label)
	b	\label
-ENDPROC(\label)
+SYM_CODE_END(\label)
.endm
	invalid_vector	el2_sync_invalid
@@ -106,15 +106,15 @@ ENDPROC(\label)
 * initialisation entry point.
 */
-ENTRY(__hyp_set_vectors)
+SYM_FUNC_START(__hyp_set_vectors)
	mov	x1, x0
	mov	x0, #HVC_SET_VECTORS
	hvc	#0
	ret
-ENDPROC(__hyp_set_vectors)
+SYM_FUNC_END(__hyp_set_vectors)
-ENTRY(__hyp_reset_vectors)
+SYM_FUNC_START(__hyp_reset_vectors)
	mov	x0, #HVC_RESET_VECTORS
	hvc	#0
	ret
-ENDPROC(__hyp_reset_vectors)
+SYM_FUNC_END(__hyp_reset_vectors)
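
Note the two linkage classes in one file: __hyp_set_vectors and __hyp_reset_vectors are called from C and become SYM_FUNC_*, while el1_sync and the invalid_vector stubs become SYM_CODE_START_LOCAL. The _LOCAL variant matters here: both this file and hibernate-asm.S define an el1_sync, so the name must stay private to each object. Roughly (a sketch with hypothetical names):

	.globl	global_sym	// global: visible to the linker across objects
global_sym:
	ret

local_sym:			// local: no .globl, private to this object file
	ret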

arch/arm64/kernel/probes/kprobes_trampoline.S

@@ -61,7 +61,7 @@
	ldp	x28, x29, [sp, #S_X28]
.endm
-ENTRY(kretprobe_trampoline)
+SYM_CODE_START(kretprobe_trampoline)
	sub	sp, sp, #S_FRAME_SIZE
	save_all_base_regs
@@ -79,4 +79,4 @@ ENTRY(kretprobe_trampoline)
	add	sp, sp, #S_FRAME_SIZE
	ret
-ENDPROC(kretprobe_trampoline)
+SYM_CODE_END(kretprobe_trampoline)

arch/arm64/kernel/reloc_test_syms.S

@@ -5,81 +5,81 @@
#include <linux/linkage.h>
-ENTRY(absolute_data64)
+SYM_FUNC_START(absolute_data64)
	ldr	x0, 0f
	ret
0:	.quad	sym64_abs
-ENDPROC(absolute_data64)
+SYM_FUNC_END(absolute_data64)
-ENTRY(absolute_data32)
+SYM_FUNC_START(absolute_data32)
	ldr	w0, 0f
	ret
0:	.long	sym32_abs
-ENDPROC(absolute_data32)
+SYM_FUNC_END(absolute_data32)
-ENTRY(absolute_data16)
+SYM_FUNC_START(absolute_data16)
	adr	x0, 0f
	ldrh	w0, [x0]
	ret
0:	.short	sym16_abs, 0
-ENDPROC(absolute_data16)
+SYM_FUNC_END(absolute_data16)
-ENTRY(signed_movw)
+SYM_FUNC_START(signed_movw)
	movz	x0, #:abs_g2_s:sym64_abs
	movk	x0, #:abs_g1_nc:sym64_abs
	movk	x0, #:abs_g0_nc:sym64_abs
	ret
-ENDPROC(signed_movw)
+SYM_FUNC_END(signed_movw)
-ENTRY(unsigned_movw)
+SYM_FUNC_START(unsigned_movw)
	movz	x0, #:abs_g3:sym64_abs
	movk	x0, #:abs_g2_nc:sym64_abs
	movk	x0, #:abs_g1_nc:sym64_abs
	movk	x0, #:abs_g0_nc:sym64_abs
	ret
-ENDPROC(unsigned_movw)
+SYM_FUNC_END(unsigned_movw)
	.align	12
	.space	0xff8
-ENTRY(relative_adrp)
+SYM_FUNC_START(relative_adrp)
	adrp	x0, sym64_rel
	add	x0, x0, #:lo12:sym64_rel
	ret
-ENDPROC(relative_adrp)
+SYM_FUNC_END(relative_adrp)
	.align	12
	.space	0xffc
-ENTRY(relative_adrp_far)
+SYM_FUNC_START(relative_adrp_far)
	adrp	x0, memstart_addr
	add	x0, x0, #:lo12:memstart_addr
	ret
-ENDPROC(relative_adrp_far)
+SYM_FUNC_END(relative_adrp_far)
-ENTRY(relative_adr)
+SYM_FUNC_START(relative_adr)
	adr	x0, sym64_rel
	ret
-ENDPROC(relative_adr)
+SYM_FUNC_END(relative_adr)
-ENTRY(relative_data64)
+SYM_FUNC_START(relative_data64)
	adr	x1, 0f
	ldr	x0, [x1]
	add	x0, x0, x1
	ret
0:	.quad	sym64_rel - .
-ENDPROC(relative_data64)
+SYM_FUNC_END(relative_data64)
-ENTRY(relative_data32)
+SYM_FUNC_START(relative_data32)
	adr	x1, 0f
	ldr	w0, [x1]
	add	x0, x0, x1
	ret
0:	.long	sym64_rel - .
-ENDPROC(relative_data32)
+SYM_FUNC_END(relative_data32)
-ENTRY(relative_data16)
+SYM_FUNC_START(relative_data16)
	adr	x1, 0f
	ldrsh	w0, [x1]
	add	x0, x0, x1
	ret
0:	.short	sym64_rel - ., 0
-ENDPROC(relative_data16)
+SYM_FUNC_END(relative_data16)
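
The signed_movw/unsigned_movw tests exercise the movw group relocations (:abs_g3: down to :abs_g0_nc:), which have the linker fill in a 64-bit address 16 bits at a time. With literal immediates, the same idiom looks like this (worked sketch):

	movz	x0, #0xdead, lsl #48	// x0 = 0xdead000000000000
	movk	x0, #0xbeef, lsl #32	// x0 = 0xdeadbeef00000000
	movk	x0, #0xcafe, lsl #16	// x0 = 0xdeadbeefcafe0000
	movk	x0, #0xf00d		// x0 = 0xdeadbeefcafef00d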

arch/arm64/kernel/relocate_kernel.S

@@ -26,7 +26,7 @@
 * control_code_page, a special page which has been set up to be preserved
 * during the copy operation.
 */
-ENTRY(arm64_relocate_new_kernel)
+SYM_CODE_START(arm64_relocate_new_kernel)
	/* Setup the list loop variables. */
	mov	x18, x2			/* x18 = dtb address */
@@ -111,7 +111,7 @@ ENTRY(arm64_relocate_new_kernel)
	mov	x3, xzr
	br	x17
-ENDPROC(arm64_relocate_new_kernel)
+SYM_CODE_END(arm64_relocate_new_kernel)
.align 3 /* To keep the 64-bit values below naturally aligned. */
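
arm64_relocate_new_kernel also gets the SYM_CODE_* pair: it runs from the preserved control code page and hands off to the new kernel with the br x17 above, so it is never entered or left as a normal AAPCS64 call. A hypothetical sketch of that kind of handoff (reloc_code is an illustrative name):

	ldr	x16, =reloc_code	// address of the copied code page
	br	x16			// plain indirect branch: no lr, no return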

arch/arm64/kernel/sleep.S

@@ -62,7 +62,7 @@
 *
 * x0 = struct sleep_stack_data area
 */
-ENTRY(__cpu_suspend_enter)
+SYM_FUNC_START(__cpu_suspend_enter)
	stp	x29, lr, [x0, #SLEEP_STACK_DATA_CALLEE_REGS]
	stp	x19, x20, [x0,#SLEEP_STACK_DATA_CALLEE_REGS+16]
	stp	x21, x22, [x0,#SLEEP_STACK_DATA_CALLEE_REGS+32]
@@ -95,10 +95,10 @@ ENTRY(__cpu_suspend_enter)
	ldp	x29, lr, [sp], #16
	mov	x0, #1
	ret
-ENDPROC(__cpu_suspend_enter)
+SYM_FUNC_END(__cpu_suspend_enter)
	.pushsection ".idmap.text", "awx"
-ENTRY(cpu_resume)
+SYM_CODE_START(cpu_resume)
	bl	el2_setup		// if in EL2 drop to EL1 cleanly
	mov	x0, #ARM64_CPU_RUNTIME
	bl	__cpu_setup
@@ -107,11 +107,11 @@ ENTRY(cpu_resume)
	bl	__enable_mmu
	ldr	x8, =_cpu_resume
	br	x8
-ENDPROC(cpu_resume)
+SYM_CODE_END(cpu_resume)
	.ltorg
	.popsection
-ENTRY(_cpu_resume)
+SYM_FUNC_START(_cpu_resume)
	mrs	x1, mpidr_el1
	adr_l	x8, mpidr_hash		// x8 = struct mpidr_hash virt address
@@ -147,4 +147,4 @@ ENTRY(_cpu_resume)
	ldp	x29, lr, [x29]
	mov	x0, #0
	ret
-ENDPROC(_cpu_resume)
+SYM_FUNC_END(_cpu_resume)
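
__cpu_suspend_enter stays SYM_FUNC_* because it is an ordinary call from C (the cpu_suspend() path), and as a function it only has to preserve the AAPCS64 callee-saved state, which is why the stores above cover exactly x19-x28, x29/fp, lr and sp. That save idiom in isolation (sketch, with a hypothetical save area in x0):

	stp	x19, x20, [x0, #16 * 0]
	stp	x21, x22, [x0, #16 * 1]
	stp	x23, x24, [x0, #16 * 2]
	stp	x25, x26, [x0, #16 * 3]
	stp	x27, x28, [x0, #16 * 4]
	stp	x29, lr, [x0, #16 * 5]

cpu_resume, by contrast, is entered from firmware with the MMU off and leaves via br, so it takes SYM_CODE_*.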

arch/arm64/kernel/smccc-call.S

@@ -30,9 +30,9 @@
 * unsigned long a6, unsigned long a7, struct arm_smccc_res *res,
 * struct arm_smccc_quirk *quirk)
 */
-ENTRY(__arm_smccc_smc)
+SYM_FUNC_START(__arm_smccc_smc)
	SMCCC	smc
-ENDPROC(__arm_smccc_smc)
+SYM_FUNC_END(__arm_smccc_smc)
EXPORT_SYMBOL(__arm_smccc_smc)
/*
@@ -41,7 +41,7 @@ EXPORT_SYMBOL(__arm_smccc_smc)
 * unsigned long a6, unsigned long a7, struct arm_smccc_res *res,
 * struct arm_smccc_quirk *quirk)
 */
-ENTRY(__arm_smccc_hvc)
+SYM_FUNC_START(__arm_smccc_hvc)
	SMCCC	hvc
-ENDPROC(__arm_smccc_hvc)
+SYM_FUNC_END(__arm_smccc_hvc)
EXPORT_SYMBOL(__arm_smccc_hvc)
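
Both wrappers are C-callable (hence SYM_FUNC_* and EXPORT_SYMBOL): the SMCCC macro issues a single smc or hvc conduit instruction, with the function ID in w0, arguments in x1-x7, and results returned in x0-x3. A minimal raw call of this kind, for illustration (PSCI_VERSION, function ID 0x84000000):

	mov	w0, #0x84000000		// SMCCC function ID in w0
	smc	#0			// trap to the secure firmware
	// on return, x0 holds the result (here, the PSCI version)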