locking/xchg/alpha: Clean up barrier usage by using smp_mb() in place of __ASM__MB

Replace each occurrence of __ASM__MB with a (trailing) smp_mb() in
xchg() and cmpxchg(), and remove the now-unused __ASM__MB definitions;
this improves readability, with no additional synchronization cost.

Suggested-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Andrea Parri <parri.andrea@gmail.com>
Acked-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Alan Stern <stern@rowland.harvard.edu>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Ivan Kokshaysky <ink@jurassic.park.msu.ru>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Matt Turner <mattst88@gmail.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Richard Henderson <rth@twiddle.net>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: linux-alpha@vger.kernel.org
Link: http://lkml.kernel.org/r/1519291469-5702-1-git-send-email-parri.andrea@gmail.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
commit 79d442461d (parent cb13b424e9)
Author:    Andrea Parri, 2018-02-22 10:24:29 +01:00
Committed: Ingo Molnar
2 changed files with 8 additions and 14 deletions
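For context, a sketch of the barrier definitions involved (simplified, not
the kernel's verbatim macros): on Alpha, smp_mb() expands to the "mb"
instruction under CONFIG_SMP and degrades to a pure compiler barrier on UP
builds, which is why a trailing smp_mb() in C code can stand in for the
conditional __ASM__MB string that used to be pasted into the asm bodies:

	#ifdef CONFIG_SMP
	#define smp_mb()  __asm__ __volatile__("mb" : : : "memory")  /* full memory barrier */
	#else
	#define smp_mb()  __asm__ __volatile__("" : : : "memory")    /* compiler barrier only */
	#endif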

arch/alpha/include/asm/cmpxchg.h

@@ -6,7 +6,6 @@
  * Atomic exchange routines.
  */

-#define __ASM__MB
 #define ____xchg(type, args...)	__xchg ## type ## _local(args)
 #define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 #include <asm/xchg.h>
@@ -33,10 +32,6 @@
 	cmpxchg_local((ptr), (o), (n));	\
 })

-#ifdef CONFIG_SMP
-#undef __ASM__MB
-#define __ASM__MB	"\tmb\n"
-#endif
 #undef ____xchg
 #undef ____cmpxchg
 #define ____xchg(type, args...)		__xchg ##type(args)
@@ -64,7 +59,6 @@
 	cmpxchg((ptr), (o), (n));	\
 })

-#undef __ASM__MB
 #undef ____cmpxchg

 #endif /* _ALPHA_CMPXCHG_H */
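The hunks above touch both passes of cmpxchg.h's double-include trick: the
header pulls in asm/xchg.h twice, first generating the unordered _local
variants and then the fully ordered ones.  Roughly (a condensed sketch of
the file's structure, not a line-for-line copy):

	#define ____xchg(type, args...)		__xchg ## type ## _local(args)
	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
	#include <asm/xchg.h>			/* pass 1: __xchg*_local(), __cmpxchg*_local() */

	#undef ____xchg
	#undef ____cmpxchg
	#define ____xchg(type, args...)		__xchg ## type(args)
	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
	#include <asm/xchg.h>			/* pass 2: the ordered __xchg*(), __cmpxchg*() */

With the barrier now expressed in C as smp_mb(), the second pass no longer
needs a per-pass __ASM__MB definition, so all three #define/#undef sites go.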

arch/alpha/include/asm/xchg.h

@@ -28,12 +28,12 @@ ____xchg(_u8, volatile char *m, unsigned long val)
 	"	or	%1,%2,%2\n"
 	"	stq_c	%2,0(%3)\n"
 	"	beq	%2,2f\n"
-		__ASM__MB
 	".subsection 2\n"
 	"2:	br	1b\n"
 	".previous"
 	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
 	: "r" ((long)m), "1" (val) : "memory");
+	smp_mb();

 	return ret;
 }
@@ -52,12 +52,12 @@ ____xchg(_u16, volatile short *m, unsigned long val)
 	"	or	%1,%2,%2\n"
 	"	stq_c	%2,0(%3)\n"
 	"	beq	%2,2f\n"
-		__ASM__MB
 	".subsection 2\n"
 	"2:	br	1b\n"
 	".previous"
 	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
 	: "r" ((long)m), "1" (val) : "memory");
+	smp_mb();

 	return ret;
 }
@@ -72,12 +72,12 @@ ____xchg(_u32, volatile int *m, unsigned long val)
 	"	bis	$31,%3,%1\n"
 	"	stl_c	%1,%2\n"
 	"	beq	%1,2f\n"
-		__ASM__MB
 	".subsection 2\n"
 	"2:	br	1b\n"
 	".previous"
 	: "=&r" (val), "=&r" (dummy), "=m" (*m)
 	: "rI" (val), "m" (*m) : "memory");
+	smp_mb();

 	return val;
 }
@@ -92,12 +92,12 @@ ____xchg(_u64, volatile long *m, unsigned long val)
 	"	bis	$31,%3,%1\n"
 	"	stq_c	%1,%2\n"
 	"	beq	%1,2f\n"
-		__ASM__MB
 	".subsection 2\n"
 	"2:	br	1b\n"
 	".previous"
 	: "=&r" (val), "=&r" (dummy), "=m" (*m)
 	: "rI" (val), "m" (*m) : "memory");
+	smp_mb();

 	return val;
 }
@@ -150,12 +150,12 @@ ____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
 	"	stq_c	%2,0(%4)\n"
 	"	beq	%2,3f\n"
 	"2:\n"
-		__ASM__MB
 	".subsection 2\n"
 	"3:	br	1b\n"
 	".previous"
 	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
 	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
+	smp_mb();

 	return prev;
 }
@@ -177,12 +177,12 @@ ____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
 	"	stq_c	%2,0(%4)\n"
 	"	beq	%2,3f\n"
 	"2:\n"
-		__ASM__MB
 	".subsection 2\n"
 	"3:	br	1b\n"
 	".previous"
 	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
 	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
+	smp_mb();

 	return prev;
 }
@@ -200,12 +200,12 @@ ____cmpxchg(_u32, volatile int *m, int old, int new)
 	"	stl_c	%1,%2\n"
 	"	beq	%1,3f\n"
 	"2:\n"
-		__ASM__MB
 	".subsection 2\n"
 	"3:	br	1b\n"
 	".previous"
 	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
 	: "r"((long) old), "r"(new), "m"(*m) : "memory");
+	smp_mb();

 	return prev;
 }
@@ -223,12 +223,12 @@ ____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
 	"	stq_c	%1,%2\n"
 	"	beq	%1,3f\n"
 	"2:\n"
-		__ASM__MB
 	".subsection 2\n"
 	"3:	br	1b\n"
 	".previous"
 	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
 	: "r"((long) old), "r"(new), "m"(*m) : "memory");
+	smp_mb();

 	return prev;
 }