selftests/bpf: Add big-endian support to the ldsx test

Prepare the ldsx test to run on big-endian systems by adding the
necessary endianness checks around narrow memory accesses.

Signed-off-by: Ilya Leoshkevich <iii@linux.ibm.com>
Link: https://lore.kernel.org/r/20230919101336.2223655-4-iii@linux.ibm.com
Signed-off-by: Alexei Starovoitov <ast@kernel.org>
Commit: 9873ce2e9c (parent 6cb66eca36)
Author: Ilya Leoshkevich, 2023-09-19 12:09:05 +02:00
Committed by: Alexei Starovoitov
2 files changed, 90 insertions(+), 62 deletions(-)
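For context: the narrow loads changed below read the least significant byte, half-word or word of a 64-bit stack slot, and where those bytes sit in memory depends on byte order. The following standalone C sketch (an illustration added for this write-up, not part of the patch) shows the same offset arithmetic on the host:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
        /* Mimic the S8 test: a 64-bit slot holding 0x3fe, read back narrowly. */
        uint64_t slot = 0x3fe;
        unsigned char bytes[8];

        memcpy(bytes, &slot, sizeof(slot));   /* bytes[] stands in for the slot at r10 - 8 */

#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
        int8_t low = (int8_t)bytes[0];        /* LSB at the lowest address: BPF reads at r10 - 8 */
#else
        int8_t low = (int8_t)bytes[7];        /* LSB at the highest address: BPF reads at r10 - 1 */
#endif
        /* 0x3fe truncated to 8 bits is 0xfe, which sign-extends to -2,
         * matching the test's __retval(-2).
         */
        printf("%d\n", low);
        return 0;
}

The same reasoning gives the adjustment in the first hunk: on big-endian, the least significant byte of the 4-byte __sk_buff mark field lives at offsetof(struct __sk_buff, mark) + sizeof(skb->mark) - 1 rather than at offsetof(struct __sk_buff, mark) itself.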

@@ -104,7 +104,11 @@ int _tc(volatile struct __sk_buff *skb)
         "%[tmp_mark] = r1"
         : [tmp_mark]"=r"(tmp_mark)
         : [ctx]"r"(skb),
-          [off_mark]"i"(offsetof(struct __sk_buff, mark))
+          [off_mark]"i"(offsetof(struct __sk_buff, mark)
+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+                        + sizeof(skb->mark) - 1
+#endif
+                       )
         : "r1");
 #else
         tmp_mark = (char)skb->mark;

@@ -13,12 +13,16 @@ __description("LDSX, S8")
 __success __success_unpriv __retval(-2)
 __naked void ldsx_s8(void)
 {
-        asm volatile (" \
-        r1 = 0x3fe; \
-        *(u64 *)(r10 - 8) = r1; \
-        r0 = *(s8 *)(r10 - 8); \
-        exit; \
-" ::: __clobber_all);
+        asm volatile (
+        "r1 = 0x3fe;"
+        "*(u64 *)(r10 - 8) = r1;"
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+        "r0 = *(s8 *)(r10 - 8);"
+#else
+        "r0 = *(s8 *)(r10 - 1);"
+#endif
+        "exit;"
+        ::: __clobber_all);
 }
 
 SEC("socket")
@@ -26,12 +30,16 @@ __description("LDSX, S16")
 __success __success_unpriv __retval(-2)
 __naked void ldsx_s16(void)
 {
-        asm volatile (" \
-        r1 = 0x3fffe; \
-        *(u64 *)(r10 - 8) = r1; \
-        r0 = *(s16 *)(r10 - 8); \
-        exit; \
-" ::: __clobber_all);
+        asm volatile (
+        "r1 = 0x3fffe;"
+        "*(u64 *)(r10 - 8) = r1;"
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+        "r0 = *(s16 *)(r10 - 8);"
+#else
+        "r0 = *(s16 *)(r10 - 2);"
+#endif
+        "exit;"
+        ::: __clobber_all);
 }
 
 SEC("socket")
@@ -39,13 +47,17 @@ __description("LDSX, S32")
 __success __success_unpriv __retval(-1)
 __naked void ldsx_s32(void)
 {
-        asm volatile (" \
-        r1 = 0xfffffffe; \
-        *(u64 *)(r10 - 8) = r1; \
-        r0 = *(s32 *)(r10 - 8); \
-        r0 >>= 1; \
-        exit; \
-" ::: __clobber_all);
+        asm volatile (
+        "r1 = 0xfffffffe;"
+        "*(u64 *)(r10 - 8) = r1;"
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+        "r0 = *(s32 *)(r10 - 8);"
+#else
+        "r0 = *(s32 *)(r10 - 4);"
+#endif
+        "r0 >>= 1;"
+        "exit;"
+        ::: __clobber_all);
 }
 
 SEC("socket")
@@ -54,20 +66,24 @@ __log_level(2) __success __retval(1)
 __msg("R1_w=scalar(smin=-128,smax=127)")
 __naked void ldsx_s8_range_priv(void)
 {
-        asm volatile (" \
-        call %[bpf_get_prandom_u32]; \
-        *(u64 *)(r10 - 8) = r0; \
-        r1 = *(s8 *)(r10 - 8); \
-        /* r1 with s8 range */ \
-        if r1 s> 0x7f goto l0_%=; \
-        if r1 s< -0x80 goto l0_%=; \
-        r0 = 1; \
-l1_%=: \
-        exit; \
-l0_%=: \
-        r0 = 2; \
-        goto l1_%=; \
-" :
+        asm volatile (
+        "call %[bpf_get_prandom_u32];"
+        "*(u64 *)(r10 - 8) = r0;"
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+        "r1 = *(s8 *)(r10 - 8);"
+#else
+        "r1 = *(s8 *)(r10 - 1);"
+#endif
+        /* r1 with s8 range */
+        "if r1 s> 0x7f goto l0_%=;"
+        "if r1 s< -0x80 goto l0_%=;"
+        "r0 = 1;"
+        "l1_%=:"
+        "exit;"
+        "l0_%=:"
+        "r0 = 2;"
+        "goto l1_%=;"
+        :
         : __imm(bpf_get_prandom_u32)
         : __clobber_all);
 }
@@ -77,20 +93,24 @@ __description("LDSX, S16 range checking")
 __success __success_unpriv __retval(1)
 __naked void ldsx_s16_range(void)
 {
-        asm volatile (" \
-        call %[bpf_get_prandom_u32]; \
-        *(u64 *)(r10 - 8) = r0; \
-        r1 = *(s16 *)(r10 - 8); \
-        /* r1 with s16 range */ \
-        if r1 s> 0x7fff goto l0_%=; \
-        if r1 s< -0x8000 goto l0_%=; \
-        r0 = 1; \
-l1_%=: \
-        exit; \
-l0_%=: \
-        r0 = 2; \
-        goto l1_%=; \
-" :
+        asm volatile (
+        "call %[bpf_get_prandom_u32];"
+        "*(u64 *)(r10 - 8) = r0;"
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+        "r1 = *(s16 *)(r10 - 8);"
+#else
+        "r1 = *(s16 *)(r10 - 2);"
+#endif
+        /* r1 with s16 range */
+        "if r1 s> 0x7fff goto l0_%=;"
+        "if r1 s< -0x8000 goto l0_%=;"
+        "r0 = 1;"
+        "l1_%=:"
+        "exit;"
+        "l0_%=:"
+        "r0 = 2;"
+        "goto l1_%=;"
+        :
         : __imm(bpf_get_prandom_u32)
         : __clobber_all);
 }
@@ -100,20 +120,24 @@ __description("LDSX, S32 range checking")
 __success __success_unpriv __retval(1)
 __naked void ldsx_s32_range(void)
 {
-        asm volatile (" \
-        call %[bpf_get_prandom_u32]; \
-        *(u64 *)(r10 - 8) = r0; \
-        r1 = *(s32 *)(r10 - 8); \
-        /* r1 with s16 range */ \
-        if r1 s> 0x7fffFFFF goto l0_%=; \
-        if r1 s< -0x80000000 goto l0_%=; \
-        r0 = 1; \
-l1_%=: \
-        exit; \
-l0_%=: \
-        r0 = 2; \
-        goto l1_%=; \
-" :
+        asm volatile (
+        "call %[bpf_get_prandom_u32];"
+        "*(u64 *)(r10 - 8) = r0;"
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+        "r1 = *(s32 *)(r10 - 8);"
+#else
+        "r1 = *(s32 *)(r10 - 4);"
+#endif
+        /* r1 with s16 range */
+        "if r1 s> 0x7fffFFFF goto l0_%=;"
+        "if r1 s< -0x80000000 goto l0_%=;"
+        "r0 = 1;"
+        "l1_%=:"
+        "exit;"
+        "l0_%=:"
+        "r0 = 2;"
+        "goto l1_%=;"
+        :
         : __imm(bpf_get_prandom_u32)
         : __clobber_all);
 }