From 90ff6a17d0e07d689886cba4244674bfd41e7a2d Mon Sep 17 00:00:00 2001
From: "Michael S. Tsirkin"
Date: Mon, 21 Dec 2015 09:22:18 +0200
Subject: arm64: reuse asm-generic/barrier.h

On arm64, nop, read_barrier_depends, smp_read_barrier_depends,
smp_store_mb(), smp_mb__before_atomic and smp_mb__after_atomic
match the asm-generic variants exactly. Drop the local definitions
and pull in asm-generic/barrier.h instead.

This is in preparation for refactoring this code area.

Signed-off-by: Michael S. Tsirkin
Acked-by: Arnd Bergmann
Acked-by: Peter Zijlstra (Intel)
---
 arch/arm64/include/asm/barrier.h | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)

diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index 9622eb48f894..91a43f48914d 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -91,14 +91,7 @@ do {								\
 	__u.__val;						\
 })
 
-#define read_barrier_depends()		do { } while(0)
-#define smp_read_barrier_depends()	do { } while(0)
-
-#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
-#define nop()		asm volatile("nop");
-
-#define smp_mb__before_atomic()	smp_mb()
-#define smp_mb__after_atomic()	smp_mb()
+#include <asm-generic/barrier.h>
 
 #endif	/* __ASSEMBLY__ */
-- 
cgit v1.2.3

From fd072df850e536bf46e9981be4be95961ce5eef3 Mon Sep 17 00:00:00 2001
From: "Michael S. Tsirkin"
Date: Sun, 27 Dec 2015 15:04:42 +0200
Subject: arm64: define __smp_xxx

This defines __smp_xxx barriers for arm64,
for use by virtualization.

smp_xxx barriers are removed as they are
defined correctly by asm-generic/barrier.h.

Note: arm64 does not support !SMP config,
so smp_xxx and __smp_xxx are always equivalent.

Signed-off-by: Michael S. Tsirkin
Acked-by: Arnd Bergmann
Acked-by: Peter Zijlstra (Intel)
---
 arch/arm64/include/asm/barrier.h | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index 91a43f48914d..dae5c49618db 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -35,11 +35,11 @@
 #define dma_rmb()	dmb(oshld)
 #define dma_wmb()	dmb(oshst)
 
-#define smp_mb()	dmb(ish)
-#define smp_rmb()	dmb(ishld)
-#define smp_wmb()	dmb(ishst)
+#define __smp_mb()	dmb(ish)
+#define __smp_rmb()	dmb(ishld)
+#define __smp_wmb()	dmb(ishst)
 
-#define smp_store_release(p, v)					\
+#define __smp_store_release(p, v)				\
 do {								\
 	compiletime_assert_atomic_type(*p);			\
 	switch (sizeof(*p)) {					\
@@ -62,7 +62,7 @@ do {								\
 	}							\
 } while (0)
 
-#define smp_load_acquire(p)					\
+#define __smp_load_acquire(p)					\
 ({								\
 	union { typeof(*p) __val; char __c[1]; } __u;		\
 	compiletime_assert_atomic_type(*p);			\
-- 
cgit v1.2.3
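
For context on why dropping the arch-local smp_xxx definitions is safe: after this
series, asm-generic/barrier.h maps the generic barrier names back onto whatever
__smp_xxx hooks the architecture provides, and routes the virtualization barriers
(virt_mb() and friends) to the same hooks. The sketch below is a simplified
illustration of that mapping, not the verbatim asm-generic header; the !SMP
fallbacks shown are an approximation of the generic defaults.

#ifdef CONFIG_SMP
/* With SMP enabled, the generic names resolve to the arch hooks
 * (on arm64: dmb(ish) / dmb(ishld) / dmb(ishst)). */
#define smp_mb()	__smp_mb()
#define smp_rmb()	__smp_rmb()
#define smp_wmb()	__smp_wmb()
#else
/* On UP kernels a compiler barrier is enough for the smp_xxx names... */
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#endif

/* ...but a guest kernel may still share memory with a hypervisor or
 * host running on another CPU, so the virt_xxx barriers always use the
 * real arch hooks, even on !SMP builds. */
#define virt_mb()	__smp_mb()
#define virt_rmb()	__smp_rmb()
#define virt_wmb()	__smp_wmb()

On arm64 the distinction is moot, since the second commit message notes that !SMP
is not supported and smp_xxx and __smp_xxx are therefore always equivalent; the
generic mapping is what lets other architectures keep lighter UP barriers while
virtualization code still gets the strong ones.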