commit 7837314d14
It was a nice optimization - on paper at least. In practice it results in
branches that may exceed the maximum legal range for a branch. We can
fight that problem with -ffunction-sections, but -ffunction-sections in turn
is incompatible with -pg used by the function tracer.
By rewriting the loops around all simple LL/SC blocks in C we reduce the
amount of inline assembler and at the same time allow GCC to often fill
the branch delay slots with something sensible, or apply whatever other
clever optimization it may have up its sleeve (a sketch of the resulting
pattern follows the commit message).
With this optimization gone we also no longer need -ffunction-sections,
so drop it.
This optimization was originally introduced in 2.6.21, commit
5999eca25c1fd4b9b9aca7833b04d10fe4bc877d (linux-mips.org) resp.
f65e4fa8e0 (kernel.org).
The original fix for the issues that caused me to pull this optimization
was provided by Paul Gortmaker <paul.gortmaker@windriver.com>.
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
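
For illustration, here is a minimal sketch of the pattern the rewrite produces, assuming a hypothetical 32-bit helper (atomic_add_sketch and its exact formatting are illustrative, not taken from the commit's diff): the retry loop lives in C, so the inline assembler contains a single LL/SC attempt and no backward branch that could fall out of range.

static inline void atomic_add_sketch(int i, int *counter)
{
	int temp;

	/* Retry in C: the asm block is one LL/SC attempt, no loop branch. */
	do {
		__asm__ __volatile__(
		"	.set	mips3			\n"
		"	ll	%0, %1			\n"	/* load-linked */
		"	addu	%0, %2			\n"
		"	sc	%0, %1			\n"	/* store-conditional */
		"	.set	mips0			\n"
		: "=&r" (temp), "=m" (*counter)
		: "Ir" (i), "m" (*counter));
	} while (!temp);	/* sc leaves 0 in temp on failure, so retry */
}

GCC now generates the backward branch itself and is free to fill its delay slot.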
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/irqflags.h>

#define __HAVE_ARCH_CMPXCHG 1

#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	mips3				\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	mips3				\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqzl	$1, 1b				\n"	\
		"2:						\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=R" (*m)				\
		: "R" (*m), "Jr" (old), "Jr" (new)			\
		: "memory");						\
	} else if (kernel_uses_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	mips3				\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	mips3				\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqz	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=R" (*m)				\
		: "R" (*m), "Jr" (old), "Jr" (new)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __res = 0;					\
									\
	pre_barrier;							\
									\
	switch (sizeof(*(__ptr))) {					\
	case 4:								\
		__res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new);	\
		break;							\
	case 8:								\
		if (sizeof(long) == 8) {				\
			__res = __cmpxchg_asm("lld", "scd", __ptr,	\
					      __old, __new);		\
			break;						\
		}							\
	default:							\
		__cmpxchg_called_with_bad_pointer();			\
		break;							\
	}								\
									\
	post_barrier;							\
									\
	__res;								\
})

#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, , )

#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* __ASM_CMPXCHG_H */
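
For context, a minimal usage sketch of the cmpxchg() macro defined above; slow_increment is a hypothetical helper, not something from this header or the kernel tree:

/* Classic compare-and-swap retry loop built on cmpxchg(). */
static inline unsigned int slow_increment(unsigned int *p)
{
	unsigned int old, new;

	do {
		old = *p;		/* snapshot the current value */
		new = old + 1;		/* the update we want to publish */
		/*
		 * cmpxchg() returns the value it found at *p; if another
		 * CPU changed *p after the snapshot, retry with fresh data.
		 */
	} while (cmpxchg(p, old, new) != old);

	return new;
}

With a 4-byte operand this resolves to the ll/sc pair in __cmpxchg_asm(); an unsupported size falls through to __cmpxchg_called_with_bad_pointer() and fails at link time.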