sparc32: add __cmpxchg_u{8,16}() and teach __cmpxchg() to handle those sizes

Trivial now that the CMPXCHG() macro in atomic32.c generates the per-size helpers.

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Signed-off-by: Paul E. McKenney <paulmck@kernel.org>
Author:    Al Viro <viro@zeniv.linux.org.uk>
Date:      2024-04-01 22:17:49 -04:00
Committer: Paul E. McKenney <paulmck@kernel.org>
Parent:    d7b52b48f0
Commit:    dbc93fdcdc

2 changed files with 11 additions and 9 deletions

arch/sparc/include/asm/cmpxchg_32.h

@@ -38,21 +38,19 @@ static __always_inline unsigned long __arch_xchg(unsigned long x, __volatile__ v
 
 /* bug catcher for when unsupported size is used - won't link */
 void __cmpxchg_called_with_bad_pointer(void);
-/* we only need to support cmpxchg of a u32 on sparc */
+u8 __cmpxchg_u8(volatile u8 *m, u8 old, u8 new_);
+u16 __cmpxchg_u16(volatile u16 *m, u16 old, u16 new_);
 u32 __cmpxchg_u32(volatile u32 *m, u32 old, u32 new_);
 
 /* don't worry...optimizer will get rid of most of this */
 static inline unsigned long
 __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
 {
-	switch (size) {
-	case 4:
-		return __cmpxchg_u32(ptr, old, new_);
-	default:
-		__cmpxchg_called_with_bad_pointer();
-		break;
-	}
-	return old;
+	return
+		size == 1 ? __cmpxchg_u8(ptr, old, new_) :
+		size == 2 ? __cmpxchg_u16(ptr, old, new_) :
+		size == 4 ? __cmpxchg_u32(ptr, old, new_) :
+			(__cmpxchg_called_with_bad_pointer(), old);
 }
 
 #define arch_cmpxchg(ptr, o, n)					\

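For readers less familiar with this idiom, here is a minimal user-space sketch of the size-dispatch pattern used in the hunk above. All emu_* names are stand-ins invented for illustration, the helpers are plain non-atomic functions, and the declared-but-undefined emu_cmpxchg_called_with_bad_pointer() reproduces the link-time catcher trick. Build with optimization (e.g. cc -O2) so the dead branches, and with them the reference to the undefined symbol, are folded away, just as the kernel relies on its optimizer.

#include <stdint.h>
#include <stdio.h>

/*
 * Declared but deliberately never defined: if the dispatch below is ever
 * reached for an unsupported size, the build fails at link time, the same
 * trick __cmpxchg_called_with_bad_pointer() plays in the patch.
 */
unsigned long emu_cmpxchg_called_with_bad_pointer(void);

/* Plain, non-atomic stand-ins for the per-size helpers. */
static uint8_t emu_cmpxchg_u8(volatile uint8_t *m, uint8_t old, uint8_t new_)
{
	uint8_t prev = *m;

	if (prev == old)
		*m = new_;
	return prev;
}

static uint16_t emu_cmpxchg_u16(volatile uint16_t *m, uint16_t old, uint16_t new_)
{
	uint16_t prev = *m;

	if (prev == old)
		*m = new_;
	return prev;
}

static uint32_t emu_cmpxchg_u32(volatile uint32_t *m, uint32_t old, uint32_t new_)
{
	uint32_t prev = *m;

	if (prev == old)
		*m = new_;
	return prev;
}

/*
 * Same shape as the patched __cmpxchg(): a ternary chain keyed on the
 * operand size.  When size is a compile-time constant, the optimizer keeps
 * exactly one helper call and drops the rest.
 */
static inline unsigned long emu_cmpxchg(volatile void *ptr, unsigned long old,
					unsigned long new_, int size)
{
	return
		size == 1 ? emu_cmpxchg_u8(ptr, old, new_) :
		size == 2 ? emu_cmpxchg_u16(ptr, old, new_) :
		size == 4 ? emu_cmpxchg_u32(ptr, old, new_) :
			(emu_cmpxchg_called_with_bad_pointer(), old);
}

int main(void)
{
	uint8_t b = 5;

	/* sizeof(b) == 1 selects emu_cmpxchg_u8(); prints the old value, 5. */
	printf("%lu\n", emu_cmpxchg(&b, 5, 9, sizeof(b)));
	printf("%d\n", b);	/* 9 */
	return 0;
}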

arch/sparc/lib/atomic32.c

@@ -173,8 +173,12 @@ EXPORT_SYMBOL(sp32___change_bit);
 		return prev;					\
 	}
 
+CMPXCHG(u8)
+CMPXCHG(u16)
 CMPXCHG(u32)
 CMPXCHG(u64)
+EXPORT_SYMBOL(__cmpxchg_u8);
+EXPORT_SYMBOL(__cmpxchg_u16);
 EXPORT_SYMBOL(__cmpxchg_u32);
 EXPORT_SYMBOL(__cmpxchg_u64);
 
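Similarly, here is a small self-contained sketch of the CMPXCHG(T) generator pattern touched by the second hunk. EMU_CMPXCHG and emu_lock are assumed names, and a single pthread mutex stands in for the ATOMIC_HASH() array of hashed spinlocks that the kernel's atomic32.c uses, so this illustrates the emulation idea rather than the actual implementation. Build with something like cc -O2 sketch.c -pthread.

#include <stdint.h>
#include <stdio.h>
#include <pthread.h>

/* One global mutex stands in for the hashed spinlocks of atomic32.c. */
static pthread_mutex_t emu_lock = PTHREAD_MUTEX_INITIALIZER;

/*
 * Token-pasting generator in the spirit of CMPXCHG(T): one lock-protected
 * compare-and-exchange body, stamped out once per type.
 */
#define EMU_CMPXCHG(T)						\
	T emu_cmpxchg_##T(volatile T *ptr, T old, T new_)	\
	{							\
		T prev;						\
								\
		pthread_mutex_lock(&emu_lock);			\
		if ((prev = *ptr) == old)			\
			*ptr = new_;				\
		pthread_mutex_unlock(&emu_lock);		\
								\
		return prev;					\
	}

/* Instantiate 8- and 16-bit variants, as the patch does with u8/u16. */
EMU_CMPXCHG(uint8_t)
EMU_CMPXCHG(uint16_t)

int main(void)
{
	uint16_t v = 100;

	/* Successful exchange: *ptr matched old, so the new value is stored. */
	printf("%d\n", emu_cmpxchg_uint16_t(&v, 100, 200));	/* 100 */
	/* Failed exchange: v is now 200, so nothing is stored. */
	printf("%d\n", emu_cmpxchg_uint16_t(&v, 100, 300));	/* 200 */
	printf("%d\n", v);					/* 200 */
	return 0;
}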