powerpc: Reduce csum_add() complexity for PPC64
PPC64 does everything in C; GCC is able to skip the calculation when one of the operands is zero. Move the constant folding into the PPC32 part.

This helps GCC and reduces ppc64_defconfig by 170 bytes.

Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/a4ca63dd4c4b09e1906d08fb814af5a41d0f3fcb.1644651363.git.christophe.leroy@csgroup.eu
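As an aside (not part of the commit), a minimal sketch of the point being made: with plain C arithmetic GCC can fold an addition of a known zero on its own, whereas it cannot look inside the PPC32 inline asm, which is why the __builtin_constant_p() checks only help there. The names and types below (csum_add_c, csum_add_asm, plain uint32_t instead of the kernel's __wsum) are simplified for illustration.

/* Sketch only: contrasts the pure-C and inline-asm csum_add() variants. */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

/* Pure-C variant (what PPC64 now uses): ordinary arithmetic, so
 * csum_add_c(x, 0) folds to x without any explicit zero check. */
static inline u32 csum_add_c(u32 csum, u32 addend)
{
	u64 res = (u64)csum + addend;

	return (u32)res + (u32)(res >> 32);	/* fold the carry back in */
}

/* Inline-asm variant (the PPC32 path): the compiler cannot see through
 * the asm, so a constant-zero addend would still emit addc/addze unless
 * the __builtin_constant_p() checks short-circuit it first. */
static inline u32 csum_add_asm(u32 csum, u32 addend)
{
	if (__builtin_constant_p(addend) && addend == 0)
		return csum;
#ifdef __powerpc__
	asm("addc %0,%0,%1;"
	    "addze %0,%0;"
	    : "+r" (csum) : "r" (addend) : "xer");
	return csum;
#else
	return csum_add_c(csum, addend);	/* fallback off powerpc */
#endif
}

int main(void)
{
	printf("%#x\n", (unsigned)csum_add_c(0xffff0001u, 0x0000ffffu));
	printf("%#x\n", (unsigned)csum_add_asm(0xffff0001u, 0));
	return 0;
}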
commit f206fdd9d4
parent a553476c44
diff --git a/arch/powerpc/include/asm/checksum.h b/arch/powerpc/include/asm/checksum.h
@@ -95,16 +95,15 @@ static __always_inline __wsum csum_add(__wsum csum, __wsum addend)
 {
 #ifdef __powerpc64__
 	u64 res = (__force u64)csum;
-#endif
+
+	res += (__force u64)addend;
+	return (__force __wsum)((u32)res + (res >> 32));
+#else
 	if (__builtin_constant_p(csum) && csum == 0)
 		return addend;
 	if (__builtin_constant_p(addend) && addend == 0)
 		return csum;
 
-#ifdef __powerpc64__
-	res += (__force u64)addend;
-	return (__force __wsum)((u32)res + (res >> 32));
-#else
 	asm("addc %0,%0,%1;"
 	    "addze %0,%0;"
 	    : "+r" (csum) : "r" (addend) : "xer");
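For readability, here is how csum_add() reads after this patch, reconstructed from the hunk above. The typedef/macro shims and the closing lines that fall outside the hunk are added only so the excerpt stands on its own; in the real header, __wsum, __force and __always_inline come from the kernel's own headers.

/* Reconstruction of the patched function, with shims for standalone reading. */
#include <stdint.h>

typedef uint32_t u32;
typedef uint64_t u64;
typedef u32 __wsum;		/* kernel: __bitwise checksum type */
#define __force			/* kernel: sparse annotation */
#define __always_inline inline __attribute__((always_inline))

static __always_inline __wsum csum_add(__wsum csum, __wsum addend)
{
#ifdef __powerpc64__
	/* Plain C: GCC folds the add away when an operand is constant zero. */
	u64 res = (__force u64)csum;

	res += (__force u64)addend;
	return (__force __wsum)((u32)res + (res >> 32));
#else
	/* Inline asm is opaque to GCC, so keep the explicit zero checks here. */
	if (__builtin_constant_p(csum) && csum == 0)
		return addend;
	if (__builtin_constant_p(addend) && addend == 0)
		return csum;

	asm("addc %0,%0,%1;"
	    "addze %0,%0;"
	    : "+r" (csum) : "r" (addend) : "xer");
	return csum;
#endif
}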