Merge branch 'for-next/asm-annotations' into for-next/core
* for-next/asm-annotations: (6 commits) arm64: kernel: Correct annotation of end of el0_sync ...
commit aa246c056c
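Most of this merge is a mechanical conversion from the old assembler annotations (ENTRY/ENDPROC/ENDPIPROC/WEAK) to the newer SYM_FUNC_* macros. As a rough sketch of the pattern applied in the hunks below (my_func is a made-up symbol used purely for illustration):

    /* old-style annotations */
    ENTRY(my_func)
        mov x0, xzr
        ret
    ENDPROC(my_func)

    /* new-style annotations */
    SYM_FUNC_START(my_func)
        mov x0, xzr
        ret
    SYM_FUNC_END(my_func)

Weak and position-independent symbols follow the same shape via the SYM_FUNC_START_WEAK and SYM_FUNC_*_PI variants introduced in the linkage.h hunk.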
@@ -448,17 +448,6 @@ USER(\label, ic ivau, \tmp2) // invalidate I line PoU
b.ne 9998b
.endm

/*
* Annotate a function as position independent, i.e., safe to be called before
* the kernel virtual mapping is activated.
*/
#define ENDPIPROC(x) \
.globl __pi_##x; \
.type __pi_##x, %function; \
.set __pi_##x, x; \
.size __pi_##x, . - x; \
ENDPROC(x)

/*
* Annotate a function as being unsuitable for kprobes.
*/
@@ -4,4 +4,20 @@
#define __ALIGN .align 2
#define __ALIGN_STR ".align 2"

/*
* Annotate a function as position independent, i.e., safe to be called before
* the kernel virtual mapping is activated.
*/
#define SYM_FUNC_START_PI(x) \
SYM_FUNC_START_ALIAS(__pi_##x); \
SYM_FUNC_START(x)

#define SYM_FUNC_START_WEAK_PI(x) \
SYM_FUNC_START_ALIAS(__pi_##x); \
SYM_FUNC_START_WEAK(x)

#define SYM_FUNC_END_PI(x) \
SYM_FUNC_END(x); \
SYM_FUNC_END_ALIAS(__pi_##x)

#endif
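With the helpers above in place, a weak, position-independent routine can be annotated as in the following sketch (the body is elided; memchr is used only because its actual conversion appears further down in this diff):

    SYM_FUNC_START_WEAK_PI(memchr)
        /* ... routine body ... */
        ret
    SYM_FUNC_END_PI(memchr)

The _PI variants also emit a __pi_memchr alias, preserving what the removed ENDPIPROC macro used to provide: an entry point that is safe to call before the kernel virtual mapping is activated.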
@@ -650,6 +650,7 @@ el0_sync:
mov x0, sp
bl el0_sync_handler
b ret_to_user
ENDPROC(el0_sync)

#ifdef CONFIG_COMPAT
.align 6
@@ -658,16 +659,18 @@ el0_sync_compat:
mov x0, sp
bl el0_sync_compat_handler
b ret_to_user
ENDPROC(el0_sync)
ENDPROC(el0_sync_compat)

.align 6
el0_irq_compat:
kernel_entry 0, 32
b el0_irq_naked
ENDPROC(el0_irq_compat)

el0_error_compat:
kernel_entry 0, 32
b el0_error_naked
ENDPROC(el0_error_compat)
#endif

.align 6
@@ -14,7 +14,7 @@
* Parameters:
* x0 - dest
*/
ENTRY(clear_page)
SYM_FUNC_START(clear_page)
mrs x1, dczid_el0
and w1, w1, #0xf
mov x2, #4
@@ -25,5 +25,5 @@ ENTRY(clear_page)
tst x0, #(PAGE_SIZE - 1)
b.ne 1b
ret
ENDPROC(clear_page)
SYM_FUNC_END(clear_page)
EXPORT_SYMBOL(clear_page)
@@ -19,7 +19,7 @@
*
* Alignment fixed up by hardware.
*/
ENTRY(__arch_clear_user)
SYM_FUNC_START(__arch_clear_user)
mov x2, x1 // save the size for fixup return
subs x1, x1, #8
b.mi 2f
@@ -40,7 +40,7 @@ uao_user_alternative 9f, strh, sttrh, wzr, x0, 2
uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
5: mov x0, #0
ret
ENDPROC(__arch_clear_user)
SYM_FUNC_END(__arch_clear_user)
EXPORT_SYMBOL(__arch_clear_user)

.section .fixup,"ax"
@@ -53,12 +53,12 @@
.endm

end .req x5
ENTRY(__arch_copy_from_user)
SYM_FUNC_START(__arch_copy_from_user)
add end, x0, x2
#include "copy_template.S"
mov x0, #0 // Nothing to copy
ret
ENDPROC(__arch_copy_from_user)
SYM_FUNC_END(__arch_copy_from_user)
EXPORT_SYMBOL(__arch_copy_from_user)

.section .fixup,"ax"
@@ -55,12 +55,12 @@

end .req x5

ENTRY(__arch_copy_in_user)
SYM_FUNC_START(__arch_copy_in_user)
add end, x0, x2
#include "copy_template.S"
mov x0, #0
ret
ENDPROC(__arch_copy_in_user)
SYM_FUNC_END(__arch_copy_in_user)
EXPORT_SYMBOL(__arch_copy_in_user)

.section .fixup,"ax"
@@ -17,7 +17,7 @@
* x0 - dest
* x1 - src
*/
ENTRY(copy_page)
SYM_FUNC_START(copy_page)
alternative_if ARM64_HAS_NO_HW_PREFETCH
// Prefetch three cache lines ahead.
prfm pldl1strm, [x1, #128]
@@ -75,5 +75,5 @@ alternative_else_nop_endif
stnp x16, x17, [x0, #112 - 256]

ret
ENDPROC(copy_page)
SYM_FUNC_END(copy_page)
EXPORT_SYMBOL(copy_page)
@@ -52,12 +52,12 @@
.endm

end .req x5
ENTRY(__arch_copy_to_user)
SYM_FUNC_START(__arch_copy_to_user)
add end, x0, x2
#include "copy_template.S"
mov x0, #0
ret
ENDPROC(__arch_copy_to_user)
SYM_FUNC_END(__arch_copy_to_user)
EXPORT_SYMBOL(__arch_copy_to_user)

.section .fixup,"ax"
@@ -85,17 +85,17 @@ CPU_BE( rev16 w3, w3 )
.endm

.align 5
ENTRY(crc32_le)
SYM_FUNC_START(crc32_le)
alternative_if_not ARM64_HAS_CRC32
b crc32_le_base
alternative_else_nop_endif
__crc32
ENDPROC(crc32_le)
SYM_FUNC_END(crc32_le)

.align 5
ENTRY(__crc32c_le)
SYM_FUNC_START(__crc32c_le)
alternative_if_not ARM64_HAS_CRC32
b __crc32c_le_base
alternative_else_nop_endif
__crc32 c
ENDPROC(__crc32c_le)
SYM_FUNC_END(__crc32c_le)
@@ -19,7 +19,7 @@
* Returns:
* x0 - address of first occurrence of 'c' or 0
*/
WEAK(memchr)
SYM_FUNC_START_WEAK_PI(memchr)
and w1, w1, #0xff
1: subs x2, x2, #1
b.mi 2f
@@ -30,5 +30,5 @@ WEAK(memchr)
ret
2: mov x0, #0
ret
ENDPIPROC(memchr)
SYM_FUNC_END_PI(memchr)
EXPORT_SYMBOL_NOKASAN(memchr)
@@ -46,7 +46,7 @@ pos .req x11
limit_wd .req x12
mask .req x13

WEAK(memcmp)
SYM_FUNC_START_WEAK_PI(memcmp)
cbz limit, .Lret0
eor tmp1, src1, src2
tst tmp1, #7
@@ -243,5 +243,5 @@ CPU_LE( rev data2, data2 )
.Lret0:
mov result, #0
ret
ENDPIPROC(memcmp)
SYM_FUNC_END_PI(memcmp)
EXPORT_SYMBOL_NOKASAN(memcmp)
@@ -57,11 +57,11 @@
.endm

.weak memcpy
ENTRY(__memcpy)
ENTRY(memcpy)
SYM_FUNC_START_ALIAS(__memcpy)
SYM_FUNC_START_PI(memcpy)
#include "copy_template.S"
ret
ENDPIPROC(memcpy)
SYM_FUNC_END_PI(memcpy)
EXPORT_SYMBOL(memcpy)
ENDPROC(__memcpy)
SYM_FUNC_END_ALIAS(__memcpy)
EXPORT_SYMBOL(__memcpy)
@@ -46,8 +46,8 @@ D_l .req x13
D_h .req x14

.weak memmove
ENTRY(__memmove)
ENTRY(memmove)
SYM_FUNC_START_ALIAS(__memmove)
SYM_FUNC_START_PI(memmove)
cmp dstin, src
b.lo __memcpy
add tmp1, src, count
@@ -184,7 +184,7 @@ ENTRY(memmove)
tst count, #0x3f
b.ne .Ltail63
ret
ENDPIPROC(memmove)
SYM_FUNC_END_PI(memmove)
EXPORT_SYMBOL(memmove)
ENDPROC(__memmove)
SYM_FUNC_END_ALIAS(__memmove)
EXPORT_SYMBOL(__memmove)
@@ -43,8 +43,8 @@ tmp3w .req w9
tmp3 .req x9

.weak memset
ENTRY(__memset)
ENTRY(memset)
SYM_FUNC_START_ALIAS(__memset)
SYM_FUNC_START_PI(memset)
mov dst, dstin /* Preserve return value. */
and A_lw, val, #255
orr A_lw, A_lw, A_lw, lsl #8
@@ -203,7 +203,7 @@ ENTRY(memset)
ands count, count, zva_bits_x
b.ne .Ltail_maybe_long
ret
ENDPIPROC(memset)
SYM_FUNC_END_PI(memset)
EXPORT_SYMBOL(memset)
ENDPROC(__memset)
SYM_FUNC_END_ALIAS(__memset)
EXPORT_SYMBOL(__memset)
@@ -18,7 +18,7 @@
* Returns:
* x0 - address of first occurrence of 'c' or 0
*/
WEAK(strchr)
SYM_FUNC_START_WEAK(strchr)
and w1, w1, #0xff
1: ldrb w2, [x0], #1
cmp w2, w1
@@ -28,5 +28,5 @@ WEAK(strchr)
cmp w2, w1
csel x0, x0, xzr, eq
ret
ENDPROC(strchr)
SYM_FUNC_END(strchr)
EXPORT_SYMBOL_NOKASAN(strchr)
@@ -48,7 +48,7 @@ tmp3 .req x9
zeroones .req x10
pos .req x11

WEAK(strcmp)
SYM_FUNC_START_WEAK_PI(strcmp)
eor tmp1, src1, src2
mov zeroones, #REP8_01
tst tmp1, #7
@@ -219,5 +219,5 @@ CPU_BE( orr syndrome, diff, has_nul )
lsr data1, data1, #56
sub result, data1, data2, lsr #56
ret
ENDPIPROC(strcmp)
SYM_FUNC_END_PI(strcmp)
EXPORT_SYMBOL_NOKASAN(strcmp)
@@ -44,7 +44,7 @@ pos .req x12
#define REP8_7f 0x7f7f7f7f7f7f7f7f
#define REP8_80 0x8080808080808080

WEAK(strlen)
SYM_FUNC_START_WEAK_PI(strlen)
mov zeroones, #REP8_01
bic src, srcin, #15
ands tmp1, srcin, #15
@@ -111,5 +111,5 @@ CPU_LE( lsr tmp2, tmp2, tmp1 ) /* Shift (tmp1 & 63). */
csinv data1, data1, xzr, le
csel data2, data2, data2a, le
b .Lrealigned
ENDPIPROC(strlen)
SYM_FUNC_END_PI(strlen)
EXPORT_SYMBOL_NOKASAN(strlen)
@@ -52,7 +52,7 @@ limit_wd .req x13
mask .req x14
endloop .req x15

WEAK(strncmp)
SYM_FUNC_START_WEAK_PI(strncmp)
cbz limit, .Lret0
eor tmp1, src1, src2
mov zeroones, #REP8_01
@@ -295,5 +295,5 @@ CPU_BE( orr syndrome, diff, has_nul )
.Lret0:
mov result, #0
ret
ENDPIPROC(strncmp)
SYM_FUNC_END_PI(strncmp)
EXPORT_SYMBOL_NOKASAN(strncmp)
@@ -47,7 +47,7 @@ limit_wd .req x14
#define REP8_7f 0x7f7f7f7f7f7f7f7f
#define REP8_80 0x8080808080808080

WEAK(strnlen)
SYM_FUNC_START_WEAK_PI(strnlen)
cbz limit, .Lhit_limit
mov zeroones, #REP8_01
bic src, srcin, #15
@@ -156,5 +156,5 @@ CPU_LE( lsr tmp2, tmp2, tmp4 ) /* Shift (tmp1 & 63). */
.Lhit_limit:
mov len, limit
ret
ENDPIPROC(strnlen)
SYM_FUNC_END_PI(strnlen)
EXPORT_SYMBOL_NOKASAN(strnlen)
@@ -18,7 +18,7 @@
* Returns:
* x0 - address of last occurrence of 'c' or 0
*/
WEAK(strrchr)
SYM_FUNC_START_WEAK_PI(strrchr)
mov x3, #0
and w1, w1, #0xff
1: ldrb w2, [x0], #1
@@ -29,5 +29,5 @@ WEAK(strrchr)
b 1b
2: mov x0, x3
ret
ENDPIPROC(strrchr)
SYM_FUNC_END_PI(strrchr)
EXPORT_SYMBOL_NOKASAN(strrchr)
@@ -7,7 +7,7 @@

#include <asm/assembler.h>

ENTRY(__ashlti3)
SYM_FUNC_START(__ashlti3)
cbz x2, 1f
mov x3, #64
sub x3, x3, x2
@@ -26,10 +26,10 @@ ENTRY(__ashlti3)
lsl x1, x0, x1
mov x0, x2
ret
ENDPROC(__ashlti3)
SYM_FUNC_END(__ashlti3)
EXPORT_SYMBOL(__ashlti3)

ENTRY(__ashrti3)
SYM_FUNC_START(__ashrti3)
cbz x2, 1f
mov x3, #64
sub x3, x3, x2
@@ -48,10 +48,10 @@ ENTRY(__ashrti3)
asr x0, x1, x0
mov x1, x2
ret
ENDPROC(__ashrti3)
SYM_FUNC_END(__ashrti3)
EXPORT_SYMBOL(__ashrti3)

ENTRY(__lshrti3)
SYM_FUNC_START(__lshrti3)
cbz x2, 1f
mov x3, #64
sub x3, x3, x2
@@ -70,5 +70,5 @@ ENTRY(__lshrti3)
lsr x0, x1, x0
mov x1, x2
ret
ENDPROC(__lshrti3)
SYM_FUNC_END(__lshrti3)
EXPORT_SYMBOL(__lshrti3)
@@ -24,7 +24,7 @@
* - start - virtual start address of region
* - end - virtual end address of region
*/
ENTRY(__flush_icache_range)
SYM_FUNC_START(__flush_icache_range)
/* FALLTHROUGH */

/*
@@ -37,7 +37,7 @@ ENTRY(__flush_icache_range)
* - start - virtual start address of region
* - end - virtual end address of region
*/
ENTRY(__flush_cache_user_range)
SYM_FUNC_START(__flush_cache_user_range)
uaccess_ttbr0_enable x2, x3, x4
alternative_if ARM64_HAS_CACHE_IDC
dsb ishst
@@ -66,8 +66,8 @@ alternative_else_nop_endif
9:
mov x0, #-EFAULT
b 1b
ENDPROC(__flush_icache_range)
ENDPROC(__flush_cache_user_range)
SYM_FUNC_END(__flush_icache_range)
SYM_FUNC_END(__flush_cache_user_range)

/*
* invalidate_icache_range(start,end)
@@ -77,7 +77,7 @@ ENDPROC(__flush_cache_user_range)
* - start - virtual start address of region
* - end - virtual end address of region
*/
ENTRY(invalidate_icache_range)
SYM_FUNC_START(invalidate_icache_range)
alternative_if ARM64_HAS_CACHE_DIC
mov x0, xzr
isb
@@ -94,7 +94,7 @@ alternative_else_nop_endif
2:
mov x0, #-EFAULT
b 1b
ENDPROC(invalidate_icache_range)
SYM_FUNC_END(invalidate_icache_range)

/*
* __flush_dcache_area(kaddr, size)
@@ -105,10 +105,10 @@ ENDPROC(invalidate_icache_range)
* - kaddr - kernel address
* - size - size in question
*/
ENTRY(__flush_dcache_area)
SYM_FUNC_START_PI(__flush_dcache_area)
dcache_by_line_op civac, sy, x0, x1, x2, x3
ret
ENDPIPROC(__flush_dcache_area)
SYM_FUNC_END_PI(__flush_dcache_area)

/*
* __clean_dcache_area_pou(kaddr, size)
@@ -119,14 +119,14 @@ ENDPIPROC(__flush_dcache_area)
* - kaddr - kernel address
* - size - size in question
*/
ENTRY(__clean_dcache_area_pou)
SYM_FUNC_START(__clean_dcache_area_pou)
alternative_if ARM64_HAS_CACHE_IDC
dsb ishst
ret
alternative_else_nop_endif
dcache_by_line_op cvau, ish, x0, x1, x2, x3
ret
ENDPROC(__clean_dcache_area_pou)
SYM_FUNC_END(__clean_dcache_area_pou)

/*
* __inval_dcache_area(kaddr, size)
@@ -138,7 +138,8 @@ ENDPROC(__clean_dcache_area_pou)
* - kaddr - kernel address
* - size - size in question
*/
ENTRY(__inval_dcache_area)
SYM_FUNC_START_LOCAL(__dma_inv_area)
SYM_FUNC_START_PI(__inval_dcache_area)
/* FALLTHROUGH */

/*
@@ -146,7 +147,6 @@ ENTRY(__inval_dcache_area)
* - start - virtual start address of region
* - size - size in question
*/
__dma_inv_area:
add x1, x1, x0
dcache_line_size x2, x3
sub x3, x2, #1
@@ -165,8 +165,8 @@ __dma_inv_area:
b.lo 2b
dsb sy
ret
ENDPIPROC(__inval_dcache_area)
ENDPROC(__dma_inv_area)
SYM_FUNC_END_PI(__inval_dcache_area)
SYM_FUNC_END(__dma_inv_area)

/*
* __clean_dcache_area_poc(kaddr, size)
@@ -177,7 +177,8 @@ ENDPROC(__dma_inv_area)
* - kaddr - kernel address
* - size - size in question
*/
ENTRY(__clean_dcache_area_poc)
SYM_FUNC_START_LOCAL(__dma_clean_area)
SYM_FUNC_START_PI(__clean_dcache_area_poc)
/* FALLTHROUGH */

/*
@@ -185,11 +186,10 @@ ENTRY(__clean_dcache_area_poc)
* - start - virtual start address of region
* - size - size in question
*/
__dma_clean_area:
dcache_by_line_op cvac, sy, x0, x1, x2, x3
ret
ENDPIPROC(__clean_dcache_area_poc)
ENDPROC(__dma_clean_area)
SYM_FUNC_END_PI(__clean_dcache_area_poc)
SYM_FUNC_END(__dma_clean_area)

/*
* __clean_dcache_area_pop(kaddr, size)
@@ -200,13 +200,13 @@ ENDPROC(__dma_clean_area)
* - kaddr - kernel address
* - size - size in question
*/
ENTRY(__clean_dcache_area_pop)
SYM_FUNC_START_PI(__clean_dcache_area_pop)
alternative_if_not ARM64_HAS_DCPOP
b __clean_dcache_area_poc
alternative_else_nop_endif
dcache_by_line_op cvap, sy, x0, x1, x2, x3
ret
ENDPIPROC(__clean_dcache_area_pop)
SYM_FUNC_END_PI(__clean_dcache_area_pop)

/*
* __dma_flush_area(start, size)
@@ -216,10 +216,10 @@ ENDPIPROC(__clean_dcache_area_pop)
* - start - virtual start address of region
* - size - size in question
*/
ENTRY(__dma_flush_area)
SYM_FUNC_START_PI(__dma_flush_area)
dcache_by_line_op civac, sy, x0, x1, x2, x3
ret
ENDPIPROC(__dma_flush_area)
SYM_FUNC_END_PI(__dma_flush_area)

/*
* __dma_map_area(start, size, dir)
@@ -227,11 +227,11 @@ ENDPIPROC(__dma_flush_area)
* - size - size of region
* - dir - DMA direction
*/
ENTRY(__dma_map_area)
SYM_FUNC_START_PI(__dma_map_area)
cmp w2, #DMA_FROM_DEVICE
b.eq __dma_inv_area
b __dma_clean_area
ENDPIPROC(__dma_map_area)
SYM_FUNC_END_PI(__dma_map_area)

/*
* __dma_unmap_area(start, size, dir)
@@ -239,8 +239,8 @@ ENDPIPROC(__dma_map_area)
* - size - size of region
* - dir - DMA direction
*/
ENTRY(__dma_unmap_area)
SYM_FUNC_START_PI(__dma_unmap_area)
cmp w2, #DMA_TO_DEVICE
b.ne __dma_inv_area
ret
ENDPIPROC(__dma_unmap_area)
SYM_FUNC_END_PI(__dma_unmap_area)
@@ -57,7 +57,7 @@
*
* x0: virtual address of context pointer
*/
ENTRY(cpu_do_suspend)
SYM_FUNC_START(cpu_do_suspend)
mrs x2, tpidr_el0
mrs x3, tpidrro_el0
mrs x4, contextidr_el1
@@ -81,7 +81,7 @@ alternative_endif
stp x10, x11, [x0, #64]
stp x12, x13, [x0, #80]
ret
ENDPROC(cpu_do_suspend)
SYM_FUNC_END(cpu_do_suspend)

/**
* cpu_do_resume - restore CPU register context
@@ -89,7 +89,7 @@ ENDPROC(cpu_do_suspend)
* x0: Address of context pointer
*/
.pushsection ".idmap.text", "awx"
ENTRY(cpu_do_resume)
SYM_FUNC_START(cpu_do_resume)
ldp x2, x3, [x0]
ldp x4, x5, [x0, #16]
ldp x6, x8, [x0, #32]
@@ -138,7 +138,7 @@ alternative_else_nop_endif

isb
ret
ENDPROC(cpu_do_resume)
SYM_FUNC_END(cpu_do_resume)
.popsection
#endif

@@ -149,7 +149,7 @@ ENDPROC(cpu_do_resume)
*
* - pgd_phys - physical address of new TTB
*/
ENTRY(cpu_do_switch_mm)
SYM_FUNC_START(cpu_do_switch_mm)
mrs x2, ttbr1_el1
mmid x1, x1 // get mm->context.id
phys_to_ttbr x3, x0
@@ -168,7 +168,7 @@ alternative_else_nop_endif
msr ttbr0_el1, x3 // now update TTBR0
isb
b post_ttbr_update_workaround // Back to C code...
ENDPROC(cpu_do_switch_mm)
SYM_FUNC_END(cpu_do_switch_mm)

.pushsection ".idmap.text", "awx"

@@ -189,7 +189,7 @@ ENDPROC(cpu_do_switch_mm)
* This is the low-level counterpart to cpu_replace_ttbr1, and should not be
* called by anything else. It can only be executed from a TTBR0 mapping.
*/
ENTRY(idmap_cpu_replace_ttbr1)
SYM_FUNC_START(idmap_cpu_replace_ttbr1)
save_and_disable_daif flags=x2

__idmap_cpu_set_reserved_ttbr1 x1, x3
@@ -201,7 +201,7 @@ ENTRY(idmap_cpu_replace_ttbr1)
restore_daif x2

ret
ENDPROC(idmap_cpu_replace_ttbr1)
SYM_FUNC_END(idmap_cpu_replace_ttbr1)
.popsection

#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
@@ -229,7 +229,7 @@ ENDPROC(idmap_cpu_replace_ttbr1)
*/
__idmap_kpti_flag:
.long 1
ENTRY(idmap_kpti_install_ng_mappings)
SYM_FUNC_START(idmap_kpti_install_ng_mappings)
cpu .req w0
num_cpus .req w1
swapper_pa .req x2
@@ -401,7 +401,7 @@ __idmap_kpti_secondary:

.unreq swapper_ttb
.unreq flag_ptr
ENDPROC(idmap_kpti_install_ng_mappings)
SYM_FUNC_END(idmap_kpti_install_ng_mappings)
.popsection
#endif

@@ -412,7 +412,7 @@ ENDPROC(idmap_kpti_install_ng_mappings)
* value of the SCTLR_EL1 register.
*/
.pushsection ".idmap.text", "awx"
ENTRY(__cpu_setup)
SYM_FUNC_START(__cpu_setup)
tlbi vmalle1 // Invalidate local TLB
dsb nsh

@@ -469,4 +469,4 @@ ENTRY(__cpu_setup)
#endif /* CONFIG_ARM64_HW_AFDBM */
msr tcr_el1, x10
ret // return to head.S
ENDPROC(__cpu_setup)
SYM_FUNC_END(__cpu_setup)
@@ -56,11 +56,11 @@
#define XEN_IMM 0xEA1

#define HYPERCALL_SIMPLE(hypercall) \
ENTRY(HYPERVISOR_##hypercall) \
SYM_FUNC_START(HYPERVISOR_##hypercall) \
mov x16, #__HYPERVISOR_##hypercall; \
hvc XEN_IMM; \
ret; \
ENDPROC(HYPERVISOR_##hypercall)
SYM_FUNC_END(HYPERVISOR_##hypercall)

#define HYPERCALL0 HYPERCALL_SIMPLE
#define HYPERCALL1 HYPERCALL_SIMPLE
@@ -86,7 +86,7 @@ HYPERCALL2(multicall);
HYPERCALL2(vm_assist);
HYPERCALL3(dm_op);

ENTRY(privcmd_call)
SYM_FUNC_START(privcmd_call)
mov x16, x0
mov x0, x1
mov x1, x2
@@ -109,4 +109,4 @@ ENTRY(privcmd_call)
*/
uaccess_ttbr0_disable x6, x7
ret
ENDPROC(privcmd_call);
SYM_FUNC_END(privcmd_call);