xref: /linux/arch/arm/include/asm/barrier.h (revision c0e297dc61f8d4453e07afbea1fa8d0e67cd4a34)
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__
#include <asm/outercache.h>

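/*
 * nop() is spelled "mov r0, r0" rather than the NOP mnemonic so that it
 * assembles on every supported architecture version (the architectural
 * NOP hint only appeared with ARMv6K/ARMv6T2).
 */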
#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

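/*
 * SEV/WFE/WFI are only available from ARMv6K onwards: sev() signals an
 * event to the other cores, wfe() waits for such an event, and wfi()
 * waits for an interrupt.
 */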
#if __LINUX_ARM_ARCH__ >= 7 ||		\
	(__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K))
#define sev()	__asm__ __volatile__ ("sev" : : : "memory")
#define wfe()	__asm__ __volatile__ ("wfe" : : : "memory")
#define wfi()	__asm__ __volatile__ ("wfi" : : : "memory")
#endif

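/*
 * ARMv7 has dedicated ISB/DSB/DMB instructions which take an option
 * argument (e.g. "ish" for inner-shareable, "st" for stores only).
 * ARMv6 and XScale3 express the same barriers as CP15 c7 operations;
 * the option argument is accepted but ignored there.  FA526 has no
 * DMB equivalent, and pre-v6 cores can only drain the write buffer
 * for dsb(), leaving isb() and dmb() as plain compiler barriers.
 */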
#if __LINUX_ARM_ARCH__ >= 7
#define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
#define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
#elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
				    : : "r" (0) : "memory")
#elif defined(CONFIG_CPU_FA526)
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#else
#define isb(x) __asm__ __volatile__ ("" : : : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#endif

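/*
 * Mandatory barriers: when DMA memory may be mapped bufferable, or on
 * SMP, mb() and wmb() must also push any writes held in an outer cache
 * (e.g. an L2x0 controller) out to memory, hence the outer_sync() call.
 * dma_rmb() and dma_wmb() only need to order accesses within the
 * outer-shareable domain, so a DMB suffices.
 */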
#ifdef CONFIG_ARCH_HAS_BARRIERS
#include <mach/barriers.h>
#elif defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
#define mb()		do { dsb(); outer_sync(); } while (0)
#define rmb()		dsb()
#define wmb()		do { dsb(st); outer_sync(); } while (0)
#define dma_rmb()	dmb(osh)
#define dma_wmb()	dmb(oshst)
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

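/*
 * SMP barriers: on UP kernels these collapse to compiler barriers; on
 * SMP they order accesses within the inner-shareable domain, which is
 * where all the CPUs live.
 */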
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#else
#define smp_mb()	dmb(ish)
#define smp_rmb()	smp_mb()
#define smp_wmb()	dmb(ishst)
#endif

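/*
 * 32-bit ARM has no native load-acquire/store-release instructions
 * (LDA/STL arrived with ARMv8), so both are built from a full smp_mb().
 *
 * Typical pairing (sketch): the writer publishes data before setting a
 * flag, and a reader that observes the flag set is guaranteed to see
 * the data:
 *
 *	writer:				reader:
 *	data = 42;			if (smp_load_acquire(&flag))
 *	smp_store_release(&flag, 1);		BUG_ON(data != 42);
 */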
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	ACCESS_ONCE(*p) = (v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})

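/*
 * ARM respects address dependencies in hardware, so the data-dependency
 * barriers required on Alpha are no-ops here.
 */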
#define read_barrier_depends()		do { } while(0)
#define smp_read_barrier_depends()	do { } while(0)

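/* Store the value, then order it against subsequent loads and stores. */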
#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)

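/*
 * ARM's atomic RMW operations do not imply any memory barrier of their
 * own, so the fences on either side must be full smp_mb()s.
 */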
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */