/* arch/arm/include/asm/barrier.h */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

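/* "mov r0, r0" is the traditional ARM no-op encoding, safe on all cores. */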
#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t")

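/* The SEV/WFE/WFI hint instructions exist from ARMv6K onwards (and all of ARMv7). */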
#if __LINUX_ARM_ARCH__ >= 7 ||		\
	(__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K))
#define sev()	__asm__ __volatile__ ("sev" : : : "memory")
#define wfe()	__asm__ __volatile__ ("wfe" : : : "memory")
#define wfi()	__asm__ __volatile__ ("wfi" : : : "memory")
#endif

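/*
 * ISB/DSB/DMB: instruction synchronization, data synchronization and
 * data memory barriers.  ARMv7 has dedicated instructions that take a
 * shareability/access option (e.g. "ish", "st"); ARMv6 and XScale3
 * use the equivalent CP15 c7 operations and ignore the option; older
 * cores get a compiler barrier plus, where the hardware provides one,
 * a CP15 drain-write-buffer for dsb().
 */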
#if __LINUX_ARM_ARCH__ >= 7
#define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
#define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
#elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
				    : : "r" (0) : "memory")
#elif defined(CONFIG_CPU_FA526)
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#else
#define isb(x) __asm__ __volatile__ ("" : : : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#endif

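/*
 * CONFIG_ARM_HEAVY_MB lets a platform hook an SoC-specific barrier
 * (soc_mb) in behind the architectural DSB, e.g. to drain an external
 * L2 cache or bus write buffer that a DSB alone does not reach.
 */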
#ifdef CONFIG_ARM_HEAVY_MB
extern void (*soc_mb)(void);
extern void arm_heavy_mb(void);
#define __arm_heavy_mb(x...) do { dsb(x); arm_heavy_mb(); } while (0)
#else
#define __arm_heavy_mb(x...) dsb(x)
#endif

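/*
 * Mandatory barriers: when DMA memory is mapped bufferable or the
 * kernel is SMP, mb()/wmb() must order accesses against DMA and so
 * use the heavy variants; dma_rmb()/dma_wmb() only need to order the
 * outer-shareable domain.  Otherwise a compiler barrier is enough.
 */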
#ifdef CONFIG_ARCH_HAS_BARRIERS
#include <mach/barriers.h>
#elif defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
#define mb()		__arm_heavy_mb()
#define rmb()		dsb()
#define wmb()		__arm_heavy_mb(st)
#define dma_rmb()	dmb(osh)
#define dma_wmb()	dmb(oshst)
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

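/*
 * SMP barriers: inner-shareable DMBs on SMP kernels; on UP builds a
 * compiler barrier is sufficient, as no other CPU observes ordering.
 */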
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#else
#define smp_mb()	dmb(ish)
#define smp_rmb()	smp_mb()
#define smp_wmb()	dmb(ishst)
#endif

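/*
 * Release/acquire: pre-ARMv8 ARM has no load-acquire/store-release
 * instructions, so both are built from a full smp_mb() placed before
 * the store and after the load respectively.
 *
 * Typical message-passing pairing (a minimal sketch):
 *
 *	CPU 0				CPU 1
 *	data = 42;			if (smp_load_acquire(&flag))
 *	smp_store_release(&flag, 1);		r1 = data;	@ r1 == 42
 */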
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})

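/*
 * ARM honours address dependencies in hardware, so the read-depends
 * barriers compile away (only Alpha needs a real barrier here).
 */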
#define read_barrier_depends()		do { } while(0)
#define smp_read_barrier_depends()	do { } while(0)

#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)

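/* Full barriers around atomic ops that do not themselves imply ordering. */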
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */