percpu: Add {raw,this}_cpu_try_cmpxchg()
Add the try_cmpxchg() form to the per-cpu ops.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Mark Rutland <mark.rutland@arm.com>
Tested-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20230531132323.587480729@infradead.org
This commit is contained in:
parent 8c8b096a23
commit c5c0ba953b
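The try_cmpxchg() form returns a bool and, on failure, writes the value it
actually found back through the old-value pointer, so a retry loop needs no
explicit reload. A minimal usage sketch of the new API (the per-cpu counter,
its name, and the capping policy are hypothetical, purely for illustration):

	#include <linux/percpu.h>
	#include <linux/minmax.h>

	static DEFINE_PER_CPU(unsigned long, hypo_count);

	static void hypo_add_capped(unsigned long n, unsigned long cap)
	{
		unsigned long old = this_cpu_read(hypo_count);
		unsigned long new;

		do {
			/* on failure, 'old' has already been refreshed */
			new = min(old + n, cap);
		} while (!this_cpu_try_cmpxchg(hypo_count, &old, new));
	}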
include/asm-generic/percpu.h
@@ -89,16 +89,37 @@ do { \
 	__ret; \
 })

-#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
+#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg) \
+({ \
+	typeof(pcp) __val, __old = *(ovalp); \
+	__val = _cmpxchg(pcp, __old, nval); \
+	if (__val != __old) \
+		*(ovalp) = __val; \
+	__val == __old; \
+})
+
+#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval) \
 ({ \
 	typeof(pcp) *__p = raw_cpu_ptr(&(pcp)); \
-	typeof(pcp) __ret; \
-	__ret = *__p; \
-	if (__ret == (oval)) \
+	typeof(pcp) __val = *__p, __old = *(ovalp); \
+	bool __ret; \
+	if (__val == __old) { \
 		*__p = nval; \
+		__ret = true; \
+	} else { \
+		*(ovalp) = __val; \
+		__ret = false; \
+	} \
 	__ret; \
 })

+#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+	typeof(pcp) __old = (oval); \
+	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval); \
+	__old; \
+})
+
 #define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 ({ \
 	typeof(pcp1) *__p1 = raw_cpu_ptr(&(pcp1)); \
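Both new helpers share one contract: the statement expression is true when
the store happened, and on failure the value actually observed is written
back through ovalp. For an architecture that provides only a value-returning
raw_cpu_cmpxchg_4(), the fallback therefore expands roughly to this
hand-expanded sketch (not generated code):

	typeof(pcp) __val, __old = *(ovalp);
	__val = raw_cpu_cmpxchg_4(pcp, __old, nval);	/* returns previous value */
	if (__val != __old)
		*(ovalp) = __val;	/* report the value that was found */
	__val == __old;			/* expression result: true on success */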
@@ -170,6 +191,16 @@ do { \
 	__ret; \
 })

+#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval) \
+({ \
+	bool __ret; \
+	unsigned long __flags; \
+	raw_local_irq_save(__flags); \
+	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval); \
+	raw_local_irq_restore(__flags); \
+	__ret; \
+})
+
 #define this_cpu_generic_cmpxchg(pcp, oval, nval) \
 ({ \
 	typeof(pcp) __ret; \
@@ -282,6 +313,43 @@ do { \
 #define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
 #endif

+#ifndef raw_cpu_try_cmpxchg_1
+#ifdef raw_cpu_cmpxchg_1
+#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
+#else
+#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef raw_cpu_try_cmpxchg_2
+#ifdef raw_cpu_cmpxchg_2
+#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
+#else
+#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef raw_cpu_try_cmpxchg_4
+#ifdef raw_cpu_cmpxchg_4
+#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
+#else
+#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef raw_cpu_try_cmpxchg_8
+#ifdef raw_cpu_cmpxchg_8
+#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
+#else
+#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+
 #ifndef raw_cpu_cmpxchg_1
 #define raw_cpu_cmpxchg_1(pcp, oval, nval) \
 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
@@ -407,6 +475,43 @@ do { \
 #define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
 #endif

+#ifndef this_cpu_try_cmpxchg_1
+#ifdef this_cpu_cmpxchg_1
+#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
+#else
+#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef this_cpu_try_cmpxchg_2
+#ifdef this_cpu_cmpxchg_2
+#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
+#else
+#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef this_cpu_try_cmpxchg_4
+#ifdef this_cpu_cmpxchg_4
+#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
+#else
+#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef this_cpu_try_cmpxchg_8
+#ifdef this_cpu_cmpxchg_8
+#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
+#else
+#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+
 #ifndef this_cpu_cmpxchg_1
 #define this_cpu_cmpxchg_1(pcp, oval, nval) \
 	this_cpu_generic_cmpxchg(pcp, oval, nval)
include/linux/percpu-defs.h
@@ -343,6 +343,21 @@ static __always_inline void __this_cpu_preempt_check(const char *op) { }
 	pscr2_ret__; \
 })

+#define __pcpu_size_call_return2bool(stem, variable, ...) \
+({ \
+	bool pscr2_ret__; \
+	__verify_pcpu_ptr(&(variable)); \
+	switch(sizeof(variable)) { \
+	case 1: pscr2_ret__ = stem##1(variable, __VA_ARGS__); break; \
+	case 2: pscr2_ret__ = stem##2(variable, __VA_ARGS__); break; \
+	case 4: pscr2_ret__ = stem##4(variable, __VA_ARGS__); break; \
+	case 8: pscr2_ret__ = stem##8(variable, __VA_ARGS__); break; \
+	default: \
+		__bad_size_call_parameter(); break; \
+	} \
+	pscr2_ret__; \
+})
+
 /*
  * Special handling for cmpxchg_double. cmpxchg_double is passed two
  * percpu variables. The first has to be aligned to a double word
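Dispatch is purely on sizeof() of the per-cpu variable, exactly as in the
existing __pcpu_size_call_return2(). A hand-expanded sketch (the variable
name is hypothetical, for illustration only):

	static DEFINE_PER_CPU(u32, hypo_val);	/* sizeof(hypo_val) == 4 */
	u32 old = 0;

	/* resolves to this_cpu_try_cmpxchg_4(hypo_val, &old, 1) */
	bool ok = this_cpu_try_cmpxchg(hypo_val, &old, 1);

Any size other than 1, 2, 4, or 8 is a build-time error via
__bad_size_call_parameter().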
@@ -426,6 +441,8 @@ do { \
 #define raw_cpu_xchg(pcp, nval)		__pcpu_size_call_return2(raw_cpu_xchg_, pcp, nval)
 #define raw_cpu_cmpxchg(pcp, oval, nval) \
 	__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
+#define raw_cpu_try_cmpxchg(pcp, ovalp, nval) \
+	__pcpu_size_call_return2bool(raw_cpu_try_cmpxchg_, pcp, ovalp, nval)
 #define raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)

@@ -513,6 +530,8 @@ do { \
 #define this_cpu_xchg(pcp, nval)	__pcpu_size_call_return2(this_cpu_xchg_, pcp, nval)
 #define this_cpu_cmpxchg(pcp, oval, nval) \
 	__pcpu_size_call_return2(this_cpu_cmpxchg_, pcp, oval, nval)
+#define this_cpu_try_cmpxchg(pcp, ovalp, nval) \
+	__pcpu_size_call_return2bool(this_cpu_try_cmpxchg_, pcp, ovalp, nval)
 #define this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	__pcpu_double_call_return_bool(this_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)
