/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

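/*
 * SEV/WFE/WFI are available from ARMv6K onwards; on older cores wfe()
 * degrades to a no-op so that wait/wake loops still compile.
 */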
#if __LINUX_ARM_ARCH__ >= 7 ||		\
	(__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K))
#define sev()	__asm__ __volatile__ ("sev" : : : "memory")
#define wfe()	__asm__ __volatile__ ("wfe" : : : "memory")
#define wfi()	__asm__ __volatile__ ("wfi" : : : "memory")
#else
#define wfe()	do { } while (0)
#endif

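/*
 * isb/dsb/dmb: instruction, data synchronization and data memory
 * barriers.  ARMv7 has dedicated instructions taking a domain/type
 * option; ARMv6 and XScale3 use the equivalent CP15 c7 operations;
 * older cores fall back to a compiler barrier where the hardware
 * provides no ordering instruction.
 */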
#if __LINUX_ARM_ARCH__ >= 7
#define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
#define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
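/*
 * CSDB (Consumption of Speculative Data Barrier) is emitted via .inst
 * so that assemblers which do not know the mnemonic still accept it:
 * 0xf3af8014 is the Thumb-2 encoding, 0xe320f014 the ARM encoding.
 */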
#ifdef CONFIG_THUMB2_KERNEL
#define CSDB	".inst.w 0xf3af8014"
#else
#define CSDB	".inst 0xe320f014"
#endif
#define csdb() __asm__ __volatile__(CSDB : : : "memory")
#elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
				    : : "r" (0) : "memory")
#elif defined(CONFIG_CPU_FA526)
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#else
#define isb(x) __asm__ __volatile__ ("" : : : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#endif

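/*
 * Builds that never defined CSDB above (pre-v7) get empty stubs so
 * that csdb() call sites need no further #ifdefs.
 */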
#ifndef CSDB
#define CSDB
#endif
#ifndef csdb
#define csdb()
#endif

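/*
 * Some SoCs cannot order writes to DMA-capable memory with a dsb()
 * alone; they register an extra hook in soc_mb, which arm_heavy_mb()
 * invokes (see arch/arm/mm/flush.c).
 */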
#ifdef CONFIG_ARM_HEAVY_MB
extern void (*soc_mb)(void);
extern void arm_heavy_mb(void);
#define __arm_heavy_mb(x...) do { dsb(x); arm_heavy_mb(); } while (0)
#else
#define __arm_heavy_mb(x...) dsb(x)
#endif

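/*
 * Mandatory barriers: when DMA-capable memory may be bufferable or the
 * kernel is SMP, mb()/wmb() must be visible to devices, hence the
 * heavy variants; dma_rmb()/dma_wmb() use outer-shareable dmb to order
 * against DMA observers.  Otherwise a compiler barrier suffices.
 */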
#if defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
#define mb()		__arm_heavy_mb()
#define rmb()		dsb()
#define wmb()		__arm_heavy_mb(st)
#define dma_rmb()	dmb(osh)
#define dma_wmb()	dmb(oshst)
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

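/*
 * SMP barriers only need to order accesses within the inner-shareable
 * domain (the CPUs), so an inner-shareable dmb is sufficient.
 */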
#define __smp_mb()	dmb(ish)
#define __smp_rmb()	__smp_mb()
#define __smp_wmb()	dmb(ishst)

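/*
 * array_index_mask_nospec() generates, without branching, a mask of
 * all ones when idx < sz and zero otherwise: cmp clears the carry flag
 * for idx < sz, so sbc computes idx - idx - 1 = ~0UL; the trailing
 * CSDB keeps speculation from running ahead with an unmasked index.
 */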
#ifdef CONFIG_CPU_SPECTRE
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
		"cmp	%1, %2\n"
	"	sbc	%0, %1, %1\n"
	CSDB
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	return mask;
}
#define array_index_mask_nospec array_index_mask_nospec
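/*
 * Callers normally reach this through the generic array_index_nospec()
 * helper in <linux/nospec.h>, roughly:
 *
 *	if (idx < sz)
 *		val = array[array_index_nospec(idx, sz)];
 */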
#endif

#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */