caab277b1d

Based on 1 normalized pattern(s):

  this program is free software you can redistribute it and or modify
  it under the terms of the gnu general public license version 2 as
  published by the free software foundation this program is distributed
  in the hope that it will be useful but without any warranty without
  even the implied warranty of merchantability or fitness for a
  particular purpose see the gnu general public license for more
  details you should have received a copy of the gnu general public
  license along with this program if not see http www gnu org licenses

extracted by the scancode license scanner the SPDX license identifier

  GPL-2.0-only

has been chosen to replace the boilerplate/reference in 503 file(s).

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Reviewed-by: Alexios Zavras <alexios.zavras@intel.com>
Reviewed-by: Allison Randal <allison@lohutok.net>
Reviewed-by: Enrico Weigelt <info@metux.net>
Cc: linux-spdx@vger.kernel.org
Link: https://lkml.kernel.org/r/20190602204653.811534538@linutronix.de
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>

161 lines | 3.8 KiB | C

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>
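
/*
 * __nops(n) expands, via the assembler's .rept/.endr directives, to a
 * string of n NOP instructions; nops(n) emits them inline, e.g. to pad
 * alternative instruction sequences to a fixed length.
 */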
#define __nops(n)	".rept	" #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))
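
/*
 * Low-level hint instructions: sev() signals an event to all cores,
 * wfe() waits for an event (or an interrupt), and wfi() waits for an
 * interrupt. All are compiler barriers via the "memory" clobber.
 */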
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")
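
/*
 * isb() flushes the pipeline so that subsequent instructions are
 * refetched; dmb(opt) and dsb(opt) emit data memory/synchronization
 * barriers with the given shareability/access-type option (e.g. sy,
 * ish, oshld).
 */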
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")
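
/*
 * psb_csync() is the PSB CSYNC profiling synchronization barrier and
 * csdb() the consumption-of-speculative-data barrier; both are encoded
 * in the HINT space (#17 and #20) so they execute as NOPs on CPUs that
 * do not implement them.
 */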
#define psb_csync()	asm volatile("hint #17" : : : "memory")
#define csdb()		asm volatile("hint #20" : : : "memory")
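
/*
 * spec_bar() is a full speculation barrier: the SB instruction where
 * the CPU advertises ARM64_HAS_SB, otherwise the "dsb nsh; isb"
 * fallback sequence, patched at boot via the alternatives framework.
 */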
#define spec_bar()	asm volatile(ALTERNATIVE("dsb nsh\nisb\n",		\
						 SB_BARRIER_INSN"nop\n",	\
						 ARM64_HAS_SB))
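
/*
 * Mandatory barriers, implemented as full-system DSBs: mb() orders all
 * accesses, rmb() loads, and wmb() stores.
 */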
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)
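
/*
 * DMA barriers use outer-shareable DMBs, which order accesses against
 * observers outside the inner-shareable domain, such as DMA-capable
 * devices.
 */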
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
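
/*
 * CMP sets the carry flag when idx >= sz (no borrow), so the SBC below
 * yields ~0UL when idx < sz and 0 otherwise; the trailing CSDB keeps
 * the mask from being consumed speculatively.
 */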
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	"	cmp	%1, %2\n"
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}
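
/*
 * SMP barriers only need to order the inner-shareable domain, so they
 * are DMB ISH variants rather than full-system DSBs.
 */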
#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)
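
/*
 * __smp_store_release() maps directly onto the AArch64 store-release
 * instructions (STLRB/STLRH/STLR), selected by sizeof(*p); the union
 * reinterprets the value so non-integer types of those sizes work too.
 */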
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_write(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)
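
/*
 * __smp_load_acquire() is the mirror image, using the load-acquire
 * instructions (LDARB/LDARH/LDAR) and evaluating to the loaded value.
 */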
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_read(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	}								\
	__u.__val;							\
})
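
/*
 * smp_cond_load_relaxed() spins until cond_expr (evaluated against VAL)
 * becomes true. Instead of a pure busy loop, __cmpwait_relaxed() (see
 * <asm/cmpxchg.h>) parks the CPU in WFE until the location is written,
 * using an exclusive load to arm the event monitor.
 */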
#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})
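
/*
 * smp_cond_load_acquire() is the same wait loop with acquire semantics
 * on each load. An illustrative (not from this file) use is waiting
 * for a flag written by another CPU:
 *
 *	v = smp_cond_load_acquire(&flag, VAL != 0);
 */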
#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})

#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */