/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * rseq-arm64.h
 *
 * (C) Copyright 2016-2022 - Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 * (C) Copyright 2018 - Will Deacon <will.deacon@arm.com>
 */

/*
 * aarch64 -mbig-endian generates mixed endianness code vs data:
 * little-endian code and big-endian data. Ensure the RSEQ_SIG signature
 * matches code endianness.
 */
#define RSEQ_SIG_CODE	0xd428bc00	/* BRK #0x45E0.  */

#ifdef __AARCH64EB__
#define RSEQ_SIG_DATA	0x00bc28d4	/* BRK #0x45E0.  */
#else
#define RSEQ_SIG_DATA	RSEQ_SIG_CODE
#endif

#define RSEQ_SIG	RSEQ_SIG_DATA

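/*
 * Illustrative sketch only, kept out of the build: RSEQ_SIG is the signature
 * handed to the rseq registration syscall; on abort, the kernel verifies that
 * the 32-bit word immediately preceding the abort handler IP matches it. The
 * helper name below is hypothetical and the length/flags arguments are
 * simplified; the selftests do the real registration in rseq.c.
 */
#if 0
#include <unistd.h>
#include <sys/syscall.h>
#include "rseq-abi.h"

static int rseq_example_register(struct rseq_abi *rs)
{
	/* rseq(2): rseq(struct rseq *rseq, u32 rseq_len, int flags, u32 sig) */
	return syscall(__NR_rseq, rs, sizeof(*rs), 0, RSEQ_SIG);
}
#endif
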
#define rseq_smp_mb()	__asm__ __volatile__ ("dmb ish" ::: "memory")
#define rseq_smp_rmb()	__asm__ __volatile__ ("dmb ishld" ::: "memory")
#define rseq_smp_wmb()	__asm__ __volatile__ ("dmb ishst" ::: "memory")

/* Load-acquire: load *p with LDAR into the local ____p1, the value of the expression. */
#define rseq_smp_load_acquire(p)						\
__extension__ ({								\
	__typeof(*p) ____p1;							\
	switch (sizeof(*p)) {							\
	case 1:									\
		asm volatile ("ldarb %w0, %1"					\
			: "=r" (*(__u8 *)&____p1)				\
			: "Q" (*p) : "memory");					\
		break;								\
	case 2:									\
		asm volatile ("ldarh %w0, %1"					\
			: "=r" (*(__u16 *)&____p1)				\
			: "Q" (*p) : "memory");					\
		break;								\
	case 4:									\
		asm volatile ("ldar %w0, %1"					\
			: "=r" (*(__u32 *)&____p1)				\
			: "Q" (*p) : "memory");					\
		break;								\
	case 8:									\
		asm volatile ("ldar %0, %1"					\
			: "=r" (*(__u64 *)&____p1)				\
			: "Q" (*p) : "memory");					\
		break;								\
	}									\
	____p1;									\
})

#define rseq_smp_acquire__after_ctrl_dep()	rseq_smp_rmb()

#define rseq_smp_store_release(p, v)						\
do {										\
	switch (sizeof(*p)) {							\
	case 1:									\
		asm volatile ("stlrb %w1, %0"					\
				: "=Q" (*p)					\
				: "r" ((__u8)v)					\
				: "memory");					\
		break;								\
	case 2:									\
		asm volatile ("stlrh %w1, %0"					\
				: "=Q" (*p)					\
				: "r" ((__u16)v)				\
				: "memory");					\
		break;								\
	case 4:									\
		asm volatile ("stlr %w1, %0"					\
				: "=Q" (*p)					\
				: "r" ((__u32)v)				\
				: "memory");					\
		break;								\
	case 8:									\
		asm volatile ("stlr %1, %0"					\
				: "=Q" (*p)					\
				: "r" ((__u64)v)				\
				: "memory");					\
		break;								\
	}									\
} while (0)

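/*
 * Illustrative sketch only, kept out of the build: a minimal message-passing
 * pair showing how the acquire/release helpers above are expected to be used
 * together. The function and variable names are hypothetical and not part of
 * the selftests.
 */
#if 0
static void example_publish(__u64 *data, __u32 *flag, __u64 v)
{
	*data = v;				/* plain store of the payload */
	rseq_smp_store_release(flag, 1);	/* order payload store before flag */
}

static __u64 example_consume(__u64 *data, __u32 *flag)
{
	while (!rseq_smp_load_acquire(flag))	/* pairs with the release above */
		;
	return *data;				/* observes the published payload */
}
#endif
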
#define RSEQ_ASM_TMP_REG32	"w15"
#define RSEQ_ASM_TMP_REG	"x15"
#define RSEQ_ASM_TMP_REG_2	"x14"

#define __RSEQ_ASM_DEFINE_TABLE(label, version, flags, start_ip,		\
				post_commit_offset, abort_ip)			\
	"	.pushsection	__rseq_cs, \"aw\"\n"				\
	"	.balign	32\n"							\
	__rseq_str(label) ":\n"							\
	"	.long	" __rseq_str(version) ", " __rseq_str(flags) "\n"	\
	"	.quad	" __rseq_str(start_ip) ", "				\
			  __rseq_str(post_commit_offset) ", "			\
			  __rseq_str(abort_ip) "\n"				\
	"	.popsection\n\t"						\
	"	.pushsection __rseq_cs_ptr_array, \"aw\"\n"			\
	"	.quad " __rseq_str(label) "b\n"					\
	"	.popsection\n"

#define RSEQ_ASM_DEFINE_TABLE(label, start_ip, post_commit_ip, abort_ip)	\
	__RSEQ_ASM_DEFINE_TABLE(label, 0x0, 0x0, start_ip,			\
				(post_commit_ip - start_ip), abort_ip)

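/*
 * Illustrative sketch only, kept out of the build: the table entry emitted
 * above follows the layout of the kernel's rseq critical-section descriptor,
 * reproduced here under a hypothetical struct name for reference. .balign 32
 * matches the 32-byte alignment, and the extra .quad in __rseq_cs_ptr_array
 * records a pointer to each descriptor.
 */
#if 0
struct example_rseq_cs_layout {
	__u32 version;			/* .long version			*/
	__u32 flags;			/* .long flags				*/
	__u64 start_ip;			/* .quad start_ip			*/
	__u64 post_commit_offset;	/* .quad post_commit_ip - start_ip	*/
	__u64 abort_ip;			/* .quad abort_ip			*/
} __attribute__((aligned(32)));
#endif
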
/*
 * Exit points of a rseq critical section consist of all instructions outside
 * of the critical section that the critical section can either branch to or
 * reach through the normal course of its execution. The abort IP and the
 * post-commit IP are already part of the __rseq_cs section and should not be
 * explicitly defined as additional exit points. Knowing all exit points is
 * useful to assist debuggers stepping over the critical section.
 */
#define RSEQ_ASM_DEFINE_EXIT_POINT(start_ip, exit_ip)				\
	"	.pushsection __rseq_exit_point_array, \"aw\"\n"			\
	"	.quad " __rseq_str(start_ip) ", " __rseq_str(exit_ip) "\n"	\
	"	.popsection\n"

/*
 * Arm the critical section: store the address of its rseq_cs descriptor
 * (cs_label) into the rseq_cs field of the registered struct rseq. The
 * "label" emitted afterwards marks the critical section's start IP.
 */
#define RSEQ_ASM_STORE_RSEQ_CS(label, cs_label, rseq_cs)			\
	RSEQ_INJECT_ASM(1)							\
	"	adrp	" RSEQ_ASM_TMP_REG ", " __rseq_str(cs_label) "\n"	\
	"	add	" RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG		\
			", :lo12:" __rseq_str(cs_label) "\n"			\
	"	str	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(rseq_cs) "]\n"	\
	__rseq_str(label) ":\n"

/*
 * Define the abort handler at "label": the RSEQ_SIG signature word must
 * immediately precede the abort handler IP, and the leading branch keeps the
 * signature and handler out of the normal execution path.
 */
#define RSEQ_ASM_DEFINE_ABORT(label, abort_label)				\
	"	b	222f\n"							\
	"	.inst 	"	__rseq_str(RSEQ_SIG_CODE) "\n"			\
	__rseq_str(label) ":\n"							\
	"	b	%l[" __rseq_str(abort_label) "]\n"			\
	"222:\n"

#define RSEQ_ASM_OP_STORE(value, var)						\
	"	str	%[" __rseq_str(value) "], %[" __rseq_str(var) "]\n"

#define RSEQ_ASM_OP_STORE_RELEASE(value, var)					\
	"	stlr	%[" __rseq_str(value) "], %[" __rseq_str(var) "]\n"

#define RSEQ_ASM_OP_FINAL_STORE(value, var, post_commit_label)			\
	RSEQ_ASM_OP_STORE(value, var)						\
	__rseq_str(post_commit_label) ":\n"

#define RSEQ_ASM_OP_FINAL_STORE_RELEASE(value, var, post_commit_label)		\
	RSEQ_ASM_OP_STORE_RELEASE(value, var)					\
	__rseq_str(post_commit_label) ":\n"

/* Branch to "label" when var != expect. */
#define RSEQ_ASM_OP_CMPEQ(var, expect, label)					\
	"	ldr	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"		\
	"	sub	" RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG		\
			", %[" __rseq_str(expect) "]\n"				\
	"	cbnz	" RSEQ_ASM_TMP_REG ", " __rseq_str(label) "\n"

/* 32-bit variant of RSEQ_ASM_OP_CMPEQ. */
#define RSEQ_ASM_OP_CMPEQ32(var, expect, label)					\
	"	ldr	" RSEQ_ASM_TMP_REG32 ", %[" __rseq_str(var) "]\n"	\
	"	sub	" RSEQ_ASM_TMP_REG32 ", " RSEQ_ASM_TMP_REG32		\
			", %w[" __rseq_str(expect) "]\n"			\
	"	cbnz	" RSEQ_ASM_TMP_REG32 ", " __rseq_str(label) "\n"

/* Branch to "label" when var == expect. */
#define RSEQ_ASM_OP_CMPNE(var, expect, label)					\
	"	ldr	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"		\
	"	sub	" RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG		\
			", %[" __rseq_str(expect) "]\n"				\
	"	cbz	" RSEQ_ASM_TMP_REG ", " __rseq_str(label) "\n"

/* Branch to "label" when the current cpu id does not match cpu_id. */
#define RSEQ_ASM_CMP_CPU_ID(cpu_id, current_cpu_id, label)			\
	RSEQ_INJECT_ASM(2)							\
	RSEQ_ASM_OP_CMPEQ32(current_cpu_id, cpu_id, label)

#define RSEQ_ASM_OP_R_LOAD(var)							\
	"	ldr	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"

#define RSEQ_ASM_OP_R_STORE(var)						\
	"	str	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"

#define RSEQ_ASM_OP_R_LOAD_OFF(offset)						\
	"	ldr	" RSEQ_ASM_TMP_REG ", [" RSEQ_ASM_TMP_REG		\
			", %[" __rseq_str(offset) "]]\n"

#define RSEQ_ASM_OP_R_ADD(count)						\
	"	add	" RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG		\
			", %[" __rseq_str(count) "]\n"

#define RSEQ_ASM_OP_R_FINAL_STORE(var, post_commit_label)			\
	"	str	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"		\
	__rseq_str(post_commit_label) ":\n"

#define RSEQ_ASM_OP_R_BAD_MEMCPY(dst, src, len)					\
	"	cbz	%[" __rseq_str(len) "], 333f\n"				\
	"	mov	" RSEQ_ASM_TMP_REG_2 ", %[" __rseq_str(len) "]\n"	\
	"222:	sub	" RSEQ_ASM_TMP_REG_2 ", " RSEQ_ASM_TMP_REG_2 ", #1\n"	\
	"	ldrb	" RSEQ_ASM_TMP_REG32 ", [%[" __rseq_str(src) "]"	\
			", " RSEQ_ASM_TMP_REG_2 "]\n"				\
	"	strb	" RSEQ_ASM_TMP_REG32 ", [%[" __rseq_str(dst) "]"	\
			", " RSEQ_ASM_TMP_REG_2 "]\n"				\
	"	cbnz	" RSEQ_ASM_TMP_REG_2 ", 222b\n"				\
	"333:\n"

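/*
 * Illustrative sketch only, kept out of the build: C equivalent of
 * RSEQ_ASM_OP_R_BAD_MEMCPY above. It copies "len" bytes from "src" to "dst"
 * one byte at a time, from the highest index down to 0 (not a general-purpose
 * memcpy, presumably the "BAD" in the name). The function name is
 * hypothetical.
 */
#if 0
static void example_bad_memcpy(char *dst, const char *src, unsigned long len)
{
	while (len) {
		len--;
		dst[len] = src[len];
	}
}
#endif
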
/* Per-cpu-id indexing. */

#define RSEQ_TEMPLATE_CPU_ID
#define RSEQ_TEMPLATE_MO_RELAXED
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELAXED

#define RSEQ_TEMPLATE_MO_RELEASE
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELEASE
#undef RSEQ_TEMPLATE_CPU_ID

/* Per-mm-cid indexing. */

#define RSEQ_TEMPLATE_MM_CID
#define RSEQ_TEMPLATE_MO_RELAXED
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELAXED

#define RSEQ_TEMPLATE_MO_RELEASE
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELEASE
#undef RSEQ_TEMPLATE_MM_CID

/* APIs which are not based on cpu ids. */

#define RSEQ_TEMPLATE_CPU_ID_NONE
#define RSEQ_TEMPLATE_MO_RELAXED
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELAXED
#undef RSEQ_TEMPLATE_CPU_ID_NONE
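
/*
 * Illustrative sketch only, kept out of the build: each inclusion of
 * rseq-arm64-bits.h above instantiates the critical-section helpers for one
 * combination of memory ordering (relaxed/release) and indexing mode (cpu_id,
 * mm_cid, or none). Callers go through dispatch wrappers in rseq.h; the
 * wrapper name rseq_cmpeqv_storev() and the RSEQ_MO_RELAXED /
 * RSEQ_PERCPU_CPU_ID enumerators below are assumptions taken from that header
 * and should be checked against it, and example_try_update() is hypothetical.
 */
#if 0
#include <stdint.h>
#include "rseq.h"

/* Compare-and-store on the given cpu's slot; retry/fallback handling omitted. */
static int example_try_update(intptr_t *slot, intptr_t expect, intptr_t newval, int cpu)
{
	return rseq_cmpeqv_storev(RSEQ_MO_RELAXED, RSEQ_PERCPU_CPU_ID,
				  slot, expect, newval, cpu);
}
#endif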