xref: /linux/arch/arm64/include/asm/barrier.h (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732)
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

/* Hint instructions: signal an event, wait for an event, wait for an interrupt. */
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")
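
/*
 * Illustrative sketch (hypothetical flag name, not part of this file):
 * a waiter can poll in a low-power state with wfe(), and the updater
 * wakes it with sev() once its store is visible:
 *
 *	while (READ_ONCE(flag) == 0)		// waiter
 *		wfe();
 *
 *	WRITE_ONCE(flag, 1);			// waker
 *	dsb(ishst);				// order the store before the event
 *	sev();
 */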

/* Instruction synchronization, data memory, and data synchronization barriers. */
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")
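
/*
 * Illustrative sketch (not part of this file): isb() provides context
 * synchronization, e.g. after a system register write; tcr_el1 and val
 * are only example choices:
 *
 *	asm volatile("msr tcr_el1, %0" : : "r" (val));
 *	isb();		// later instructions see the new translation control
 */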

/* Mandatory barriers: full-system data synchronization barriers. */
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

/* DMA barriers: outer-shareable, ordering accesses against coherent DMA masters. */
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)
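
/*
 * Illustrative sketch (hypothetical descriptor layout, not part of this
 * file): dma_wmb() orders CPU stores to coherent DMA memory before the
 * store that hands a descriptor to the device, and dma_rmb() is the
 * read-side counterpart:
 *
 *	desc->addr = buf_dma;			// producer (CPU -> device)
 *	desc->len  = buf_len;
 *	dma_wmb();				// publish payload first ...
 *	desc->status = DESC_OWNED_BY_DEVICE;	// ... then flip ownership
 *
 *	if (desc->status == DESC_OWNED_BY_CPU) {	// consumer
 *		dma_rmb();			// see payload only after ownership
 *		process(desc->addr, desc->len);
 *	}
 */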

/* SMP barriers: inner-shareable, ordering accesses against other CPUs only. */
#define smp_mb()	dmb(ish)
#define smp_rmb()	dmb(ishld)
#define smp_wmb()	dmb(ishst)
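
/*
 * Illustrative sketch (hypothetical names, not part of this file): the
 * classic message-passing pattern pairs smp_wmb() on the producer with
 * smp_rmb() on the consumer:
 *
 *	data = 42;			// CPU 0
 *	smp_wmb();			// data store before flag store
 *	WRITE_ONCE(ready, 1);
 *
 *	if (READ_ONCE(ready)) {		// CPU 1
 *		smp_rmb();		// flag load before data load
 *		use(data);
 *	}
 */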

/* Store-release: STLR orders all prior loads and stores before the store. */
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)

/* Load-acquire: LDAR orders the load before all subsequent loads and stores. */
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1;						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	}								\
	___p1;								\
})
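
/*
 * Illustrative sketch (hypothetical names, not part of this file): the
 * acquire/release pair expresses the message-passing pattern above
 * directly, without a separate barrier instruction:
 *
 *	shared_data = compute();		// producer
 *	smp_store_release(&ready, 1);		// STLR: data store stays before flag
 *
 *	if (smp_load_acquire(&ready))		// consumer; LDAR: flag load stays
 *		use(shared_data);		// before the data load
 */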

/* ARM64 preserves address dependencies between loads, so these are no-ops. */
#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)

#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
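
/*
 * Illustrative sketch (hypothetical names, not part of this file):
 * smp_store_mb() suits store-then-check handshakes, where the store
 * must not be reordered after the subsequent load:
 *
 *	smp_store_mb(sleeper_state, SLEEPING);	// store, then full barrier
 *	if (!READ_ONCE(work_pending))		// load cannot move before the store
 *		go_to_sleep();
 */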

#define nop()		asm volatile("nop");

#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */